From 5d42a2ec64357e8ffbca439b50c3a59840640738 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 15 Jul 2021 12:25:56 +0200 Subject: [PATCH 001/785] add modules for ps enhancements --- .gitmodules | 8 +++++++- tools/build.ps1 | 13 +++++++++---- vendor/powershell/BurntToast | 1 + vendor/powershell/PSWriteColor | 1 + vendor/powershell/README.md | 0 5 files changed, 18 insertions(+), 5 deletions(-) create mode 160000 vendor/powershell/BurntToast create mode 160000 vendor/powershell/PSWriteColor create mode 100644 vendor/powershell/README.md diff --git a/.gitmodules b/.gitmodules index 52f2fc0750..6c12c76b16 100644 --- a/.gitmodules +++ b/.gitmodules @@ -9,4 +9,10 @@ url = https://bitbucket.org/ftrack/ftrack-python-api.git [submodule "openpype/modules/ftrack/python2_vendor/arrow"] path = openpype/modules/ftrack/python2_vendor/arrow - url = https://github.com/arrow-py/arrow.git \ No newline at end of file + url = https://github.com/arrow-py/arrow.git +[submodule "vendor/powershell/BurntToast"] + path = vendor/powershell/BurntToast + url = https://github.com/Windos/BurntToast.git +[submodule "vendor/powershell/PSWriteColor"] + path = vendor/powershell/PSWriteColor + url = "https://github.com/EvotecIT/PSWriteColor.git" \ No newline at end of file diff --git a/tools/build.ps1 b/tools/build.ps1 index cc4253fe24..00cec54927 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -28,6 +28,13 @@ if($arguments -eq "--no-submodule-update") { $disable_submodule_update=$true } +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + function Start-Progress { param([ScriptBlock]$code) $scroll = "/-\|/-\|" @@ -110,10 +117,6 @@ Write-Host $art -ForegroundColor DarkGreen # Enable if PS 7.x is needed. # Show-PSWarning -$current_dir = Get-Location -$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -$openpype_root = (Get-Item $script_dir).parent.FullName - $env:_INSIDE_OPENPYPE_TOOL = "1" if (-not (Test-Path 'env:POETRY_HOME')) { @@ -201,6 +204,8 @@ Write-Host "restoring current directory" Set-Location -Path $current_dir $endTime = [int][double]::Parse((Get-Date -UFormat %s)) +New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $($endTime - $startTime) secs. You will find OpenPype and build log in build directory." + Write-Host "*** " -NoNewline -ForegroundColor Cyan Write-Host "All done in $($endTime - $startTime) secs. 
You will find OpenPype and build log in " -NoNewLine Write-Host "'.\build'" -NoNewline -ForegroundColor Green diff --git a/vendor/powershell/BurntToast b/vendor/powershell/BurntToast new file mode 160000 index 0000000000..ae0acdd870 --- /dev/null +++ b/vendor/powershell/BurntToast @@ -0,0 +1 @@ +Subproject commit ae0acdd870a2fd8d9f0d147de22dc36d6c5e399e diff --git a/vendor/powershell/PSWriteColor b/vendor/powershell/PSWriteColor new file mode 160000 index 0000000000..12eda384eb --- /dev/null +++ b/vendor/powershell/PSWriteColor @@ -0,0 +1 @@ +Subproject commit 12eda384ebd7a7954e15855e312215c009c97114 diff --git a/vendor/powershell/README.md b/vendor/powershell/README.md new file mode 100644 index 0000000000..e69de29bb2 From 0e6f4e330daa343b16a4b9bb1f6427eebf7706c0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 17:53:44 +0200 Subject: [PATCH 002/785] PS enhancement I. --- tools/build.ps1 | 63 ++++++++++-------------------- tools/build_win_installer.ps1 | 50 ++++++++++++------------ tools/create_env.ps1 | 72 ++++++++++++++++------------------- 3 files changed, 79 insertions(+), 106 deletions(-) diff --git a/tools/build.ps1 b/tools/build.ps1 index 00cec54927..89d2eac3a4 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -79,17 +79,14 @@ function Exit-WithCode($exitcode) { function Show-PSWarning() { if ($PSVersionTable.PSVersion.Major -lt 7) { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "You are using old version of PowerShell. $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" - Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray - Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White + Write-Color -Text "!!! ", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Color -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White Exit-WithCode 1 } } function Install-Poetry() { - Write-Host ">>> " -NoNewline -ForegroundColor Green - Write-Host "Installing Poetry ... " + Write-Color -Text ">>> ", "Installing Poetry ... " -Color Green, Gray $env:POETRY_HOME="$openpype_root\.poetry" (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py -UseBasicParsing).Content | python - } @@ -129,8 +126,7 @@ $version_file = Get-Content -Path "$($openpype_root)\openpype\version.py" $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') $openpype_version = $result[0].Groups['version'].Value if (-not $openpype_version) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Cannot determine OpenPype version." + Write-Color -Text "!!! ", "Cannot determine OpenPype version." -Color Yellow, Gray Exit-WithCode 1 } @@ -139,74 +135,57 @@ if (-not (Test-Path -PathType Container -Path "$($openpype_root)\build")) { New-Item -ItemType Directory -Force -Path "$($openpype_root)\build" } -Write-Host "--- " -NoNewline -ForegroundColor yellow -Write-Host "Cleaning build directory ..." +Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray try { Remove-Item -Recurse -Force "$($openpype_root)\build\*" } catch { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "Cannot clean build directory, possibly because process is using it." - Write-Host $_.Exception.Message + Write-Color -Text "!!! 
", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray + Write-Color -Text $_.Exception.Message -Color Red Exit-WithCode 1 } if (-not $disable_submodule_update) { - Write-Host ">>> " -NoNewLine -ForegroundColor green - Write-Host "Making sure submodules are up-to-date ..." - git submodule update --init --recursive + Write-Color -Text ">>> ", "Making sure submodules are up-to-date ..." -Color Green, Gray + & git submodule update --init --recursive } else { - Write-Host "*** " -NoNewLine -ForegroundColor yellow - Write-Host "Not updating submodules ..." + Write-Color -Text "*** ", "Not updating submodules ..." -Color Green, Gray } -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "OpenPype [ " -NoNewline -ForegroundColor white -Write-host $openpype_version -NoNewline -ForegroundColor green -Write-Host " ]" -ForegroundColor white +Write-Color -Text ">>> ", "OpenPype [ ", $openpype_version, " ]" -Color Green, White, Cyan, White -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." -Color Yellow, Gray & "$openpype_root\tools\create_env.ps1" } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Cleaning cache files ... " -NoNewline +Write-Color -Text ">>> ", "Cleaning cache files ... " -Color Green, Gray -NoNewline Get-ChildItem $openpype_root -Filter "*.pyc" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force Get-ChildItem $openpype_root -Filter "*.pyo" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force Get-ChildItem $openpype_root -Filter "__pycache__" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force -Recurse -Write-Host "OK" -ForegroundColor green +Write-Color -Text "OK" -Color green -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Building OpenPype ..." +Write-Color -Text ">>> ", "Building OpenPype ..." -Color Green, White $startTime = [int][double]::Parse((Get-Date -UFormat %s)) $out = & "$($env:POETRY_HOME)\bin\poetry" run python setup.py build 2>&1 Set-Content -Path "$($openpype_root)\build\build.log" -Value $out if ($LASTEXITCODE -ne 0) { - Write-Host "!!! " -NoNewLine -ForegroundColor Red - Write-Host "Build failed. Check the log: " -NoNewline - Write-Host ".\build\build.log" -ForegroundColor Yellow + Write-Color -Text "!!! ", "Build failed. 
Check the log: ", ".\build\build.log" -Color Red, Yellow, White Exit-WithCode $LASTEXITCODE } Set-Content -Path "$($openpype_root)\build\build.log" -Value $out & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\build_dependencies.py" -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "restoring current directory" +Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray Set-Location -Path $current_dir $endTime = [int][double]::Parse((Get-Date -UFormat %s)) New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $($endTime - $startTime) secs. You will find OpenPype and build log in build directory." -Write-Host "*** " -NoNewline -ForegroundColor Cyan -Write-Host "All done in $($endTime - $startTime) secs. You will find OpenPype and build log in " -NoNewLine -Write-Host "'.\build'" -NoNewline -ForegroundColor Green -Write-Host " directory." +Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1 index a0832e0135..287e5c751f 100644 --- a/tools/build_win_installer.ps1 +++ b/tools/build_win_installer.ps1 @@ -11,6 +11,12 @@ PS> .\build_win_installer.ps1 #> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" function Start-Progress { param([ScriptBlock]$code) @@ -44,7 +50,6 @@ function Start-Progress { #> } - function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId @@ -56,10 +61,8 @@ function Exit-WithCode($exitcode) { function Show-PSWarning() { if ($PSVersionTable.PSVersion.Major -lt 7) { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "You are using old version of PowerShell. $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" - Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray - Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White + Write-Color -Text "!!! ", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Color -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White Exit-WithCode 1 } } @@ -87,9 +90,6 @@ Write-Host $art -ForegroundColor DarkGreen # Enable if PS 7.x is needed. # Show-PSWarning -$current_dir = Get-Location -$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -$openpype_root = (Get-Item $script_dir).parent.FullName Set-Location -Path $openpype_root @@ -97,16 +97,15 @@ $version_file = Get-Content -Path "$($openpype_root)\openpype\version.py" $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') $openpype_version = $result[0].Groups['version'].Value if (-not $openpype_version) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Cannot determine OpenPype version." 
+ Write-Color -Text "!!! ", "Cannot determine OpenPype version." -Color Yellow, Gray Exit-WithCode 1 } + $env:BUILD_VERSION = $openpype_version iscc -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Creating OpenPype installer ... " -ForegroundColor white +Write-Color -Text ">>> ", "Creating OpenPype installer ... " -Color Green, White $build_dir_command = @" import sys @@ -115,24 +114,25 @@ print('exe.{}-{}'.format(get_platform(), sys.version[0:3])) "@ $build_dir = & python -c $build_dir_command -Write-Host "Build directory ... ${build_dir}" -ForegroundColor white +Write-Color -Text "--- ", "Build directory ", "${build_dir}" -Color Green, Gray, White $env:BUILD_DIR = $build_dir -if (Get-Command iscc -errorAction SilentlyContinue -ErrorVariable ProcessError) -{ - iscc "$openpype_root\inno_setup.iss" -}else { - Write-Host "!!! Cannot find Inno Setup command" -ForegroundColor red - Write-Host "!!! You can download it at https://jrsoftware.org/" -ForegroundColor red +if (-not (Get-Command iscc -errorAction SilentlyContinue -ErrorVariable ProcessError)) { + Write-Color -Text "!!! ", "Cannot find Inno Setup command" -Color Red, Yellow + Write-Color "!!! You can download it at https://jrsoftware.org/" -ForegroundColor red Exit-WithCode 1 } +& iscc "$openpype_root\inno_setup.iss" -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "restoring current directory" +if ($LASTEXITCODE -ne 0) { + Write-Color -Text "!!! ", "Creating installer failed." -Color Red, Yellow + Exit-WithCode 1 +} + +Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray Set-Location -Path $current_dir -Write-Host "*** " -NoNewline -ForegroundColor Cyan -Write-Host "All done. You will find OpenPype installer in " -NoNewLine -Write-Host "'.\build'" -NoNewline -ForegroundColor Green -Write-Host " directory." +New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done. You will find You will find OpenPype installer in '.\build' directory." + +Write-Color -Text "*** ", "All done. You will find OpenPype installer in ", "'.\build'", " directory." -Color Green, Gray, White, Gray diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 6c8124ccb2..6315b7b27c 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -24,6 +24,13 @@ if($arguments -eq "--verbose") { $poetry_verbosity="-vvv" } +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId @@ -36,28 +43,24 @@ function Exit-WithCode($exitcode) { function Show-PSWarning() { if ($PSVersionTable.PSVersion.Major -lt 7) { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "You are using old version of PowerShell. $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" - Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray - Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White + Write-Color -Text "!!! 
", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Color -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White Exit-WithCode 1 } } function Install-Poetry() { - Write-Host ">>> " -NoNewline -ForegroundColor Green - Write-Host "Installing Poetry ... " + Write-Color -Text ">>> ", "Installing Poetry ... " -Color Green, Gray $env:POETRY_HOME="$openpype_root\.poetry" (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py -UseBasicParsing).Content | python - } function Test-Python() { - Write-Host ">>> " -NoNewline -ForegroundColor green - Write-Host "Detecting host Python ... " -NoNewline + Write-Color -Text ">>> ", "Detecting host Python ... " -Color Green, Gray -NoNewline if (-not (Get-Command "python" -ErrorAction SilentlyContinue)) { - Write-Host "!!! Python not detected" -ForegroundColor red + Write-Color -Text "!!! ", "Python not detected" -Color Red, Yellow Set-Location -Path $current_dir Exit-WithCode 1 } @@ -70,28 +73,24 @@ print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1])) $env:PYTHON_VERSION = $p $m = $p -match '(\d+)\.(\d+)' if(-not $m) { - Write-Host "!!! Cannot determine version" -ForegroundColor red - Set-Location -Path $current_dir - Exit-WithCode 1 + Write-Color -Text "FAILED " -Color Red + Write-Color -Text "!!! ", "Cannot determine version" -Color Red, Yellow + Set-Location -Path $current_dir + Exit-WithCode 1 } # We are supporting python 3.7 only if (($matches[1] -lt 3) -or ($matches[2] -lt 7)) { - Write-Host "FAILED Version [ $p ] is old and unsupported" -ForegroundColor red + Write-Color -Text "FAILED ", "Version ", "[", $p ,"]", "is old and unsupported" -Color Red, Yellow, Cyan, White, Cyan, Yellow Set-Location -Path $current_dir Exit-WithCode 1 } elseif (($matches[1] -eq 3) -and ($matches[2] -gt 7)) { - Write-Host "WARNING Version [ $p ] is unsupported, use at your own risk." -ForegroundColor yellow - Write-Host "*** " -NoNewline -ForegroundColor yellow - Write-Host "OpenPype supports only Python 3.7" -ForegroundColor white + Write-Color -Text "WARNING Version ", "[", $p, "]", " is unsupported, use at your own risk." -Color Yellow, Cyan, White, Cyan, Yellow + Write-Color -Text "*** ", "OpenPype supports only Python 3.7" -Color Yellow, White } else { - Write-Host "OK [ $p ]" -ForegroundColor green + Write-Color "OK ", "[", $p, "]" -Color Green, Cyan, White, Cyan } } -$current_dir = Get-Location -$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -$openpype_root = (Get-Item $script_dir).parent.FullName - if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } @@ -129,41 +128,36 @@ $version_file = Get-Content -Path "$($openpype_root)\openpype\version.py" $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') $openpype_version = $result[0].Groups['version'].Value if (-not $openpype_version) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Cannot determine OpenPype version." + Write-Color -Text "!!! ", "Cannot determine OpenPype version." 
-Color Red, Yellow Set-Location -Path $current_dir Exit-WithCode 1 } -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Found OpenPype version " -NoNewline -Write-Host "[ $($openpype_version) ]" -ForegroundColor Green +Write-Color -Text ">>> ", "Found OpenPype version ", "[ ", $($openpype_version), " ]" -Color Green, Gray, Cyan, White, Cyan Test-Python -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow + Write-Color -Text "NOT FOUND" -Color Yellow Install-Poetry - Write-Host "INSTALLED" -ForegroundColor Cyan + Write-Color -Text "INSTALLED" -Color Cyan } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } if (-not (Test-Path -PathType Leaf -Path "$($openpype_root)\poetry.lock")) { - Write-Host ">>> " -NoNewline -ForegroundColor green - Write-Host "Installing virtual environment and creating lock." + Write-Color -Text ">>> ", "Installing virtual environment and creating lock." -Color Green, Gray } else { - Write-Host ">>> " -NoNewline -ForegroundColor green - Write-Host "Installing virtual environment from lock." + Write-Color -Text ">>> ", "Installing virtual environment from lock." -Color Green, Gray } & "$env:POETRY_HOME\bin\poetry" install --no-root $poetry_verbosity --ansi if ($LASTEXITCODE -ne 0) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Poetry command failed." + Write-Color -Text "!!! ", "Poetry command failed." -Color Red, Yellow Set-Location -Path $current_dir Exit-WithCode 1 } Set-Location -Path $current_dir -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Virtual environment created." + +New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Virtual environment created." + +Write-Color -Text ">>> ", "Virtual environment created." 
-Color Green, White From e7e9f461eeed634f0cba4b931fe8ea33814f26b4 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 2 Sep 2021 14:55:04 +0200 Subject: [PATCH 003/785] remove submodules --- openpype/modules/ftrack/python2_vendor/arrow | 1 - openpype/modules/ftrack/python2_vendor/ftrack-python-api | 1 - 2 files changed, 2 deletions(-) delete mode 160000 openpype/modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/ftrack/python2_vendor/ftrack-python-api diff --git a/openpype/modules/ftrack/python2_vendor/arrow b/openpype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 757af0d36eee3a35d0f2de5ce131fc4e3a9327c5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 2 Sep 2021 23:31:34 +0200 Subject: [PATCH 004/785] yet more scripts --- tools/run_mongo.ps1 | 22 +++++++-------- tools/run_project_manager.ps1 | 15 +++++----- tools/run_settings.ps1 | 15 +++++----- tools/run_tests.ps1 | 53 +++++++++++++---------------------- tools/run_tray.ps1 | 13 +++++---- 5 files changed, 54 insertions(+), 64 deletions(-) diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index 32f6cfed17..0f26e86579 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -11,6 +11,13 @@ PS> .\run_mongo.ps1 #> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + $art = @" . . .. . .. @@ -43,8 +50,7 @@ function Exit-WithCode($exitcode) { function Find-Mongo ($preferred_version) { $defaultPath = "C:\Program Files\MongoDB\Server" - Write-Host ">>> " -NoNewLine -ForegroundColor Green - Write-Host "Detecting MongoDB ... " -NoNewline + Write-Color -Text ">>> ", "Detecting MongoDB ... " -Color Geen, Gray -NoNewline if (-not (Get-Command "mongod" -ErrorAction SilentlyContinue)) { if(Test-Path "$($defaultPath)\*\bin\mongod.exe" -PathType Leaf) { # we have mongo server installed on standard Windows location @@ -52,17 +58,14 @@ function Find-Mongo ($preferred_version) { # $preferred_version. 
 $mongoVersions = Get-ChildItem -Directory 'C:\Program Files\MongoDB\Server' | Sort-Object -Property {$_.Name -as [int]}
 if(Test-Path "$($mongoVersions[-1])\bin\mongod.exe" -PathType Leaf) {
- Write-Host "OK" -ForegroundColor Green
+ Write-Color -Text "OK" -Color Green
 $use_version = $mongoVersions[-1]
 foreach ($v in $mongoVersions) {
- Write-Host " - found [ " -NoNewline
- Write-Host $v -NoNewLine -ForegroundColor Cyan
- Write-Host " ]" -NoNewLine
-
+ Write-Color -Text " - found [ ", $v, " ]" -Color Cyan, White, Cyan -NoNewLine
 $version = Split-Path $v -Leaf
 if ($preferred_version -eq $version) {
- Write-Host " *" -ForegroundColor Green
+ Write-Color -Text " *" -Color Green
 $use_version = $v
 } else {
 Write-Host ""
 }
 }
@@ -104,9 +107,6 @@ function Find-Mongo ($preferred_version) {
 #>
 }
-$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
-$openpype_root = (Get-Item $script_dir).parent.FullName
-
 # mongodb port
 $port = 2707
diff --git a/tools/run_project_manager.ps1 b/tools/run_project_manager.ps1
index a9cfbb1e7b..2932358c2a 100644
--- a/tools/run_project_manager.ps1
+++ b/tools/run_project_manager.ps1
@@ -35,6 +35,9 @@ $current_dir = Get-Location
 $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
 $openpype_root = (Get-Item $script_dir).parent.FullName
+# Install PSWriteColor to support colorized output to terminal
+$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell"
+
 $env:_INSIDE_OPENPYPE_TOOL = "1"
 # make sure Poetry is in PATH
@@ -45,15 +48,13 @@ $env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin"
 Set-Location -Path $openpype_root
-Write-Host ">>> " -NoNewline -ForegroundColor Green
-Write-Host "Reading Poetry ... " -NoNewline
+Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline
 if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) {
- Write-Host "NOT FOUND" -ForegroundColor Yellow
- Write-Host "*** " -NoNewline -ForegroundColor Yellow
- Write-Host "We need to install Poetry create virtual env first ..."
- & "$openpype_root\tools\create_env.ps1"
+ Write-Color -Text "NOT FOUND" -Color Yellow
+ Install-Poetry
+ Write-Color -Text "INSTALLED" -Color Cyan
 } else {
- Write-Host "OK" -ForegroundColor Green
+ Write-Color -Text "OK" -Color Green
 }
 & "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\start.py" projectmanager
diff --git a/tools/run_settings.ps1 b/tools/run_settings.ps1
index 1c0aa6e8f3..918ea367ab 100644
--- a/tools/run_settings.ps1
+++ b/tools/run_settings.ps1
@@ -15,6 +15,9 @@ $current_dir = Get-Location
 $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
 $openpype_root = (Get-Item $script_dir).parent.FullName
+# Install PSWriteColor to support colorized output to terminal
+$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell"
+
 $env:_INSIDE_OPENPYPE_TOOL = "1"
 # make sure Poetry is in PATH
@@ -25,15 +28,13 @@ $env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin"
 Set-Location -Path $openpype_root
-Write-Host ">>> " -NoNewline -ForegroundColor Green
-Write-Host "Reading Poetry ... " -NoNewline
+Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline
 if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) {
- Write-Host "NOT FOUND" -ForegroundColor Yellow
- Write-Host "*** " -NoNewline -ForegroundColor Yellow
- Write-Host "We need to install Poetry create virtual env first ..."
- & "$openpype_root\tools\create_env.ps1" + Write-Color -Text "NOT FOUND" -Color Yellow + Install-Poetry + Write-Color -Text "INSTALLED" -Color Cyan } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } & "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\start.py" settings --dev diff --git a/tools/run_tests.ps1 b/tools/run_tests.ps1 index e631cb72df..7995c6a8e9 100644 --- a/tools/run_tests.ps1 +++ b/tools/run_tests.ps1 @@ -11,6 +11,13 @@ PS> .\run_test.ps1 #> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId @@ -22,10 +29,8 @@ function Exit-WithCode($exitcode) { function Show-PSWarning() { if ($PSVersionTable.PSVersion.Major -lt 7) { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "You are using old version of PowerShell. $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" - Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray - Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White + Write-Color -Text "!!! ", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Color -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White Exit-WithCode 1 } } @@ -53,10 +58,6 @@ Write-Host $art -ForegroundColor DarkGreen # Enable if PS 7.x is needed. # Show-PSWarning -$current_dir = Get-Location -$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -$openpype_root = (Get-Item $script_dir).parent.FullName - $env:_INSIDE_OPENPYPE_TOOL = "1" if (-not (Test-Path 'env:POETRY_HOME')) { @@ -69,46 +70,32 @@ $version_file = Get-Content -Path "$($openpype_root)\openpype\version.py" $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') $openpype_version = $result[0].Groups['version'].Value if (-not $openpype_version) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Cannot determine OpenPype version." + Write-Color -Text "!!! ", "Cannot determine OpenPype version." -Color Yellow, Gray Exit-WithCode 1 } -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "OpenPype [ " -NoNewline -ForegroundColor white -Write-host $openpype_version -NoNewline -ForegroundColor green -Write-Host " ] ..." -ForegroundColor white +Write-Color -Text ">>> ", "OpenPype [ ", $openpype_version, " ]" -Color Green, White, Cyan, White -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." 
+ Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." -Color Yellow, Gray & "$openpype_root\tools\create_env.ps1" } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Cleaning cache files ... " -NoNewline +Write-Color -Text ">>> ", "Cleaning cache files ... " -Color Green, Gray -NoNewline Get-ChildItem $openpype_root -Filter "*.pyc" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force +Get-ChildItem $openpype_root -Filter "*.pyo" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force Get-ChildItem $openpype_root -Filter "__pycache__" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force -Recurse -Write-Host "OK" -ForegroundColor green +Write-Color -Text "OK" -Color green -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Testing OpenPype ..." +Write-Color -Text ">>> ", "Testing OpenPype ..." -Color Green, White $original_pythonpath = $env:PYTHONPATH $env:PYTHONPATH="$($openpype_root);$($env:PYTHONPATH)" & "$env:POETRY_HOME\bin\poetry" run pytest -x --capture=sys --print -W ignore::DeprecationWarning "$($openpype_root)/tests" $env:PYTHONPATH = $original_pythonpath -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "restoring current directory" +Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray Set-Location -Path $current_dir - - - - - - diff --git a/tools/run_tray.ps1 b/tools/run_tray.ps1 index 872c1524a6..7dee3d0064 100644 --- a/tools/run_tray.ps1 +++ b/tools/run_tray.ps1 @@ -14,6 +14,9 @@ $current_dir = Get-Location $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + $env:_INSIDE_OPENPYPE_TOOL = "1" # make sure Poetry is in PATH @@ -24,15 +27,13 @@ $env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." 
-Color Yellow, Gray & "$openpype_root\tools\create_env.ps1" } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" tray --debug From cd468f567331cc8125318b92a62259cf621e48db Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 3 Sep 2021 12:10:55 +0200 Subject: [PATCH 005/785] rest of the scripts converted --- tools/create_zip.ps1 | 40 +++++++++++++++------------------ tools/fetch_thirdparty_libs.ps1 | 13 ++++++----- tools/make_docs.ps1 | 27 ++++++++++++---------- tools/run_mongo.ps1 | 33 ++++++++------------------- 4 files changed, 49 insertions(+), 64 deletions(-) diff --git a/tools/create_zip.ps1 b/tools/create_zip.ps1 index c27857b480..3796186dd0 100644 --- a/tools/create_zip.ps1 +++ b/tools/create_zip.ps1 @@ -19,6 +19,13 @@ PS> .\create_zip.ps1 --path C:\OpenPype #> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId @@ -31,18 +38,12 @@ function Exit-WithCode($exitcode) { function Show-PSWarning() { if ($PSVersionTable.PSVersion.Major -lt 7) { - Write-Host "!!! " -NoNewline -ForegroundColor Red - Write-Host "You are using old version of PowerShell. $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" - Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray - Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White + Write-Color -Text "!!! ", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Color -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White Exit-WithCode 1 } } -$current_dir = Get-Location -$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -$openpype_root = (Get-Item $script_dir).parent.FullName - $env:_INSIDE_OPENPYPE_TOOL = "1" if (-not (Test-Path 'env:POETRY_HOME')) { @@ -78,31 +79,26 @@ $version_file = Get-Content -Path "$($openpype_root)\openpype\version.py" $result = [regex]::Matches($version_file, '__version__ = "(?\d+\.\d+.\d+.*)"') $openpype_version = $result[0].Groups['version'].Value if (-not $openpype_version) { - Write-Host "!!! " -ForegroundColor yellow -NoNewline - Write-Host "Cannot determine OpenPype version." + Write-Color -Text "!!! ", "Cannot determine OpenPype version." -Color Yellow, Gray Exit-WithCode 1 } -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." 
-Color Yellow, Gray & "$openpype_root\tools\create_env.ps1" } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Cleaning cache files ... " -NoNewline +Write-Color -Text ">>> ", "Cleaning cache files ... " -Color Green, Gray -NoNewline Get-ChildItem $openpype_root -Filter "*.pyc" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force Get-ChildItem $openpype_root -Filter "*.pyo" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force -Get-ChildItem $openpype_root -Filter "__pycache__" -Force -Recurse| Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force -Recurse -Write-Host "OK" -ForegroundColor green +Get-ChildItem $openpype_root -Filter "__pycache__" -Force -Recurse | Where-Object { $_.FullName -inotmatch 'build' } | Remove-Item -Force -Recurse +Write-Color -Text "OK" -Color green -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Generating zip from current sources ..." +Write-Color -Text ">>> ", "Generating zip from current sources ..." -Color Green, Gray $env:PYTHONPATH="$($openpype_root);$($env:PYTHONPATH)" $env:OPENPYPE_ROOT="$($openpype_root)" & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\create_zip.py" $ARGS diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 index 16f7b70e7a..0226a35bfb 100644 --- a/tools/fetch_thirdparty_libs.ps1 +++ b/tools/fetch_thirdparty_libs.ps1 @@ -15,6 +15,9 @@ $current_dir = Get-Location $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + $env:_INSIDE_OPENPYPE_TOOL = "1" if (-not (Test-Path 'env:POETRY_HOME')) { @@ -23,15 +26,13 @@ if (-not (Test-Path 'env:POETRY_HOME')) { Set-Location -Path $openpype_root -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." -Color Yellow, Gray & "$openpype_root\tools\create_env.ps1" } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" diff --git a/tools/make_docs.ps1 b/tools/make_docs.ps1 index 45a11171ae..d356f081de 100644 --- a/tools/make_docs.ps1 +++ b/tools/make_docs.ps1 @@ -44,27 +44,30 @@ $art = @" "@ +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + Write-Host $art -ForegroundColor DarkGreen -Write-Host ">>> " -NoNewline -ForegroundColor Green -Write-Host "Reading Poetry ... " -NoNewline +Write-Color -Text ">>> ", "Reading Poetry ... 
" -Color Green, Gray -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { - Write-Host "NOT FOUND" -ForegroundColor Yellow - Write-Host "*** " -NoNewline -ForegroundColor Yellow - Write-Host "We need to install Poetry create virtual env first ..." - & "$openpype_root\tools\create_env.ps1" + Write-Color -Text "NOT FOUND" -Color Yellow + Install-Poetry + Write-Color -Text "INSTALLED" -Color Cyan } else { - Write-Host "OK" -ForegroundColor Green + Write-Color -Text "OK" -Color Green } -Write-Host "This will not overwrite existing source rst files, only scan and add new." +Write-Color -Text "... ", "This will not overwrite existing source rst files, only scan and add new." -Color Yellow, Gray Set-Location -Path $openpype_root -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Running apidoc ..." +Write-Color -Text ">>> ", "Running apidoc ..." -Color Green, Gray & "$env:POETRY_HOME\bin\poetry" run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" igniter & "$env:POETRY_HOME\bin\poetry" run sphinx-apidoc.exe -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" openpype vendor, openpype\vendor -Write-Host ">>> " -NoNewline -ForegroundColor green -Write-Host "Building html ..." +Write-Color -Text ">>> ", "Building html ..." -Color Green, Gray & "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\setup.py" build_sphinx Set-Location -Path $current_dir diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index 0f26e86579..a840200252 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -74,27 +74,20 @@ function Find-Mongo ($preferred_version) { $env:PATH = "$($env:PATH);$($use_version)\bin\" - Write-Host " - auto-added from [ " -NoNewline - Write-Host "$($use_version)\bin\mongod.exe" -NoNewLine -ForegroundColor Cyan - Write-Host " ]" + Write-Color -Text " - auto-added from [ ", "$($use_version)\bin\mongod.exe", " ]" -Color Cyan, White, Cyan return "$($use_version)\bin\mongod.exe" } else { - Write-Host "FAILED " -NoNewLine -ForegroundColor Red - Write-Host "MongoDB not detected" -ForegroundColor Yellow - Write-Host "Tried to find it on standard location " -NoNewline -ForegroundColor Gray - Write-Host " [ " -NoNewline -ForegroundColor Cyan - Write-Host "$($mongoVersions[-1])\bin\mongod.exe" -NoNewline -ForegroundColor White - Write-Host " ] " -NoNewLine -ForegroundColor Cyan - Write-Host "but failed." -ForegroundColor Gray + Write-Color -Text "FAILED " -Color Red -NoNewLine + Write-Color -Text "MongoDB not detected" -Color Yellow + Write-Color -Text "Tried to find it on standard location ", "[ ", "$($mongoVersions[-1])\bin\mongod.exe", " ]", " but failed." 
-Color Gray, Cyan, White, Cyan, Gray -NoNewline
 Exit-WithCode 1
 }
 } else {
- Write-Host "FAILED " -NoNewLine -ForegroundColor Red
- Write-Host "MongoDB not detected in PATH" -ForegroundColor Yellow
+ Write-Color -Text "FAILED ", "MongoDB not detected in PATH" -Color Red, Yellow
 Exit-WithCode 1
 }
 } else {
- Write-Host "OK" -ForegroundColor Green
+ Write-Color -Text "OK" -Color Green
 return Get-Command "mongod" -ErrorAction SilentlyContinue
 }
 <#
@@ -116,15 +109,7 @@ $dbpath = (Get-Item $openpype_root).parent.FullName + "\mongo_db_data"
 $preferred_version = "4.0"
 $mongoPath = Find-Mongo $preferred_version
-Write-Host ">>> " -NoNewLine -ForegroundColor Green
-Write-Host "Using DB path: " -NoNewLine
-Write-Host " [ " -NoNewline -ForegroundColor Cyan
-Write-Host "$($dbpath)" -NoNewline -ForegroundColor White
-Write-Host " ] "-ForegroundColor Cyan
-Write-Host ">>> " -NoNewLine -ForegroundColor Green
-Write-Host "Port: " -NoNewLine
-Write-Host " [ " -NoNewline -ForegroundColor Cyan
-Write-Host "$($port)" -NoNewline -ForegroundColor White
-Write-Host " ] " -ForegroundColor Cyan
-Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null
+Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan
+Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan
+Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null

From ed76a59d9d6fd691c8eaadd3e678952b583aa2a0 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 2 Feb 2022 22:53:00 +0100
Subject: [PATCH 006/785] Remove unused code

---
 .../hosts/maya/plugins/publish/collect_render.py | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py
index cbddb86e53..cca3b43fec 100644
--- a/openpype/hosts/maya/plugins/publish/collect_render.py
+++ b/openpype/hosts/maya/plugins/publish/collect_render.py
@@ -104,13 +104,12 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
         if deadline_settings["enabled"]:
             deadline_url = render_instance.data.get("deadlineUrl")
-        self._rs = renderSetup.instance()
-        current_layer = self._rs.getVisibleRenderLayer()
-        maya_render_layers = {
-            layer.name(): layer for layer in self._rs.getRenderLayers()
-        }
-        self.maya_layers = maya_render_layers
+        # Retrieve render setup layers
+        rs = renderSetup.instance()
+        maya_render_layers = {
+            layer.name(): layer for layer in rs.getRenderLayers()
+        }
         for layer in collected_render_layers:
             try:
@@ -473,10 +472,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
         return pool_a, pool_b
-    def _get_overrides(self, layer):
-        rset = self.maya_layers[layer].renderSettingsCollectionInstance()
-        return rset.getOverrides()
-
     @staticmethod
     def get_render_attribute(attr, layer):
         """Get attribute from render options. 
From f3ac88fb54732fc533e507f6064610a6fdbcd716 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 2 Feb 2022 23:05:30 +0100 Subject: [PATCH 007/785] Move deadline url logic closer together --- .../maya/plugins/publish/collect_render.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index cca3b43fec..934f81e298 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -71,7 +71,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): def process(self, context): """Entry point to collector.""" render_instance = None - deadline_url = None for instance in context: if "rendering" in instance.data["families"]: @@ -95,16 +94,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): asset = api.Session["AVALON_ASSET"] workspace = context.data["workspaceDir"] - deadline_settings = ( - context.data - ["system_settings"] - ["modules"] - ["deadline"] - ) - - if deadline_settings["enabled"]: - deadline_url = render_instance.data.get("deadlineUrl") - # Retrieve render setup layers rs = renderSetup.instance() maya_render_layers = { @@ -348,8 +337,12 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "aovSeparator": aov_separator } - if deadline_url: - data["deadlineUrl"] = deadline_url + # Collect Deadline url if Deadline module is enabled + deadline_settings = ( + context.data["system_settings"]["modules"]["deadline"] + ) + if deadline_settings["enabled"]: + data["deadlineUrl"] = render_instance.data.get("deadlineUrl") if self.sync_workfile_version: data["version"] = context.data["version"] From 542f634d73f01ed81127d655ec0902fdc2d006b5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 2 Feb 2022 23:07:10 +0100 Subject: [PATCH 008/785] Re-use "read" logic from avalon.maya --- .../hosts/maya/plugins/publish/collect_render.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 934f81e298..caee978b3f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -49,7 +49,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup import pyblish.api -from avalon import maya, api +import avalon.maya +from avalon import api from openpype.hosts.maya.api.lib_renderproducts import get as get_layer_render_products # noqa: E501 from openpype.hosts.maya.api import lib @@ -352,16 +353,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance.data["version"] = context.data["version"] # Apply each user defined attribute as data - for attr in cmds.listAttr(layer, userDefined=True) or list(): - try: - value = cmds.getAttr("{}.{}".format(layer, attr)) - except Exception: - # Some attributes cannot be read directly, - # such as mesh and color attributes. These - # are considered non-essential to this - # particular publishing pipeline. 
- value = None - + for attr, value in avalon.maya.read(layer).items(): data[attr] = value # handle standalone renderers @@ -401,7 +393,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): dict: only overrides with values """ - attributes = maya.read(render_globals) + attributes = avalon.maya.read(render_globals) options = {"renderGlobals": {}} options["renderGlobals"]["Priority"] = attributes["priority"] From f18b12e354ba5ed523474df7262261aab63a9d25 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 2 Feb 2022 23:08:55 +0100 Subject: [PATCH 009/785] Bugfix: use 'renderer' variable that was defined to correctly capture renderman independent of its versions --- openpype/hosts/maya/plugins/publish/collect_render.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index caee978b3f..059988c754 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -310,8 +310,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "byFrameStep": int( self.get_render_attribute("byFrameStep", layer=layer_name)), - "renderer": self.get_render_attribute("currentRenderer", - layer=layer_name), + "renderer": renderer, # instance subset "family": "renderlayer", "families": ["renderlayer"], From 20c4f86b8fdc06e2016c46563a5c1181258fe0cc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 2 Feb 2022 23:11:10 +0100 Subject: [PATCH 010/785] Preserve logic to get renderer from in the renderlayer --- openpype/hosts/maya/plugins/publish/collect_render.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 059988c754..f8d3761b7c 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -155,9 +155,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin): layer_name = "rs_{}".format(expected_layer_name) # collect all frames we are expecting to be rendered - renderer = cmds.getAttr( - "defaultRenderGlobals.currentRenderer" - ).lower() + renderer = self.get_render_attribute("currentRenderer", + layer=layer_name) # handle various renderman names if renderer.startswith("renderman"): renderer = "renderman" From a2c05a9f382645a6b37191aefd7b011a0f2ebd6d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 00:11:07 +0100 Subject: [PATCH 011/785] Simplify subset detection code --- .../maya/plugins/publish/collect_render.py | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index f8d3761b7c..aa5ac40be9 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -135,22 +135,21 @@ class CollectMayaRender(pyblish.api.ContextPlugin): self.log.warning(msg) continue - # test if there are sets (subsets) to attach render to + # detect if there are sets (subsets) to attach render to sets = cmds.sets(layer, query=True) or [] attach_to = [] - if sets: - for s in sets: - if "family" not in cmds.listAttr(s): - continue + for s in sets: + if not cmds.attributeQuery("family", node=s, exists=True): + continue - attach_to.append( - { - "version": None, # we need integrator for that - "subset": s, - "family": 
cmds.getAttr("{}.family".format(s)), - } - ) - self.log.info(" -> attach render to: {}".format(s)) + attach_to.append( + { + "version": None, # we need integrator for that + "subset": s, + "family": cmds.getAttr("{}.family".format(s)), + } + ) + self.log.info(" -> attach render to: {}".format(s)) layer_name = "rs_{}".format(expected_layer_name) From 47622d5dd0a41c73fa9f459c119b18d95825aa16 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 00:18:03 +0100 Subject: [PATCH 012/785] Don't collect aov_separator from settings twice --- openpype/hosts/maya/plugins/publish/collect_render.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index aa5ac40be9..44114efd5d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -281,16 +281,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): self.log.info("collecting layer: {}".format(layer_name)) # Get layer specific settings, might be overrides - try: - aov_separator = self._aov_chars[( - context.data["project_settings"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - aov_separator = "_" - data = { "subset": expected_layer_name, "attachTo": attach_to, From 9f5eb074e474e0392fb02def4369ba61d07f0ef1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 00:22:10 +0100 Subject: [PATCH 013/785] Don't provide render instance to override render products aov separator and potentially other things. - Leave it up to validators to ensure the output matches what the user wanted it to match so we can never submit wrong renders. --- .../hosts/maya/plugins/publish/collect_render.py | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 44114efd5d..f4ba862955 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -160,22 +160,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin): if renderer.startswith("renderman"): renderer = "renderman" - try: - aov_separator = self._aov_chars[( - context.data["project_settings"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - aov_separator = "_" - - render_instance.data["aovSeparator"] = aov_separator - # return all expected files for all cameras and aovs in given # frame range - layer_render_products = get_layer_render_products( - layer_name, render_instance) + layer_render_products = get_layer_render_products(layer_name) render_products = layer_render_products.layer_data.products assert render_products, "no render products generated" exp_files = [] From 44003cc95292ad7983c56335ecc16b8d4606489d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 00:37:23 +0100 Subject: [PATCH 014/785] Move logic closer together --- openpype/hosts/maya/plugins/publish/collect_render.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index f4ba862955..3aa1335c74 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -215,6 +215,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): full_paths.append(full_path) publish_meta_path = 
os.path.dirname(full_path) aov_dict[aov.keys()[0]] = full_paths + full_exp_files.append(aov_dict) frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -238,8 +239,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): frame_start_handle = frame_start_render frame_end_handle = frame_end_render - full_exp_files.append(aov_dict) - # find common path to store metadata # so if image prefix is branching to many directories # metadata file will be located in top-most common From ae35f0e7ab292f2b9d0b2126629e9da11fb3a0ca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 01:38:27 +0100 Subject: [PATCH 015/785] Refactor the "set_default_render_settings" logic out of CreateRender - This is a first step to allow the default render settings to be applied from elsewhere. - Also simplifies the logic of the actual Creator --- .../maya/plugins/create/create_render.py | 278 +++++++++--------- 1 file changed, 140 insertions(+), 138 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index fa5e73f3ed..ff230a0ff2 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -24,6 +24,142 @@ from avalon.api import Session from avalon.api import CreatorError +class RenderSettings(object): + + _image_prefix_nodes = { + 'mentalray': 'defaultRenderGlobals.imageFilePrefix', + 'vray': 'vraySettings.fileNamePrefix', + 'arnold': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'defaultRenderGlobals.imageFilePrefix', + 'redshift': 'defaultRenderGlobals.imageFilePrefix' + } + + _image_prefixes = { + 'mentalray': 'maya///{aov_separator}', # noqa + 'vray': 'maya///', + 'arnold': 'maya///{aov_separator}', # noqa + 'renderman': 'maya///{aov_separator}', + 'redshift': 'maya///{aov_separator}' # noqa + } + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + def __init__(self, project_settings): + self._project_settings = project_settings + + @staticmethod + def apply_defaults(renderer, project_settings=None): + if project_settings is None: + project_settings = get_project_settings(Session["AVALON_PROJECT"]) + + render_settings = RenderSettings(project_settings) + render_settings.set_default_renderer_settings(renderer) + + def set_default_renderer_settings(self, renderer): + """Set basic settings based on renderer. + + Args: + renderer (str): Renderer name. 
+ + """ + # project_settings/maya/create/CreateRender/aov_separator + try: + aov_separator = self._aov_chars[( + self._project_settings["maya"] + ["create"] + ["CreateRender"] + ["aov_separator"] + )] + except KeyError: + aov_separator = "_" + + prefix = self._image_prefixes[renderer] + prefix = prefix.replace("{aov_separator}", aov_separator) + cmds.setAttr(self._image_prefix_nodes[renderer], + prefix, + type="string") + + asset = get_asset() + width = asset["data"].get("resolutionWidth") + height = asset["data"].get("resolutionHeight") + + if renderer == "arnold": + # set format to exr + cmds.setAttr( + "defaultArnoldDriver.ai_translator", "exr", type="string") + self._set_global_output_settings() + + # resolution + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + if renderer == "vray": + self._set_vray_settings(aov_separator, width, height) + + if renderer == "redshift": + # set format to exr + cmds.setAttr("RedshiftOptions.imageFormat", 1) + + # resolution + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + self._set_global_output_settings() + + def _set_vray_settings(self, aov_separator, width, height): + # type: (dict) -> None + """Sets important settings for Vray.""" + settings = cmds.ls(type="VRaySettingsNode") + node = settings[0] if settings else cmds.createNode("VRaySettingsNode") + + # Set aov separator + # First we need to explicitly set the UI items in Render Settings + # because that is also what V-Ray updates to when that Render Settings + # UI did initialize before and refreshes again. + MENU = "vrayRenderElementSeparator" + if cmds.optionMenuGrp(MENU, query=True, exists=True): + items = cmds.optionMenuGrp(MENU, query=True, ill=True) + separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 + try: + sep_idx = separators.index(aov_separator) + except ValueError: + raise CreatorError( + "AOV character {} not in {}".format( + aov_separator, separators)) + + cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) + + # Set the render element attribute as string. This is also what V-Ray + # sets whenever the `vrayRenderElementSeparator` menu items switch + cmds.setAttr( + "{}.fileNameRenderElementSeparator".format(node), + aov_separator, + type="string" + ) + + # set format to exr + cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string") + + # animType + cmds.setAttr("{}.animType".format(node), 1) + + # resolution + cmds.setAttr("{}.width".format(node), width) + cmds.setAttr("{}.height".format(node), height) + + @staticmethod + def _set_global_output_settings(): + # enable animation + cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) + cmds.setAttr("defaultRenderGlobals.animation", 1) + cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) + cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) + + class CreateRender(plugin.Creator): """Create *render* instance. 
@@ -70,31 +206,6 @@ class CreateRender(plugin.Creator): _user = None _password = None - # renderSetup instance - _rs = None - - _image_prefix_nodes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', - 'vray': 'vraySettings.fileNamePrefix', - 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' - } - - _image_prefixes = { - 'mentalray': 'maya///{aov_separator}', # noqa - 'vray': 'maya///', - 'arnold': 'maya///{aov_separator}', # noqa - 'renderman': 'maya///{aov_separator}', - 'redshift': 'maya///{aov_separator}' # noqa - } - - _aov_chars = { - "dot": ".", - "dash": "-", - "underscore": "_" - } - _project_settings = None def __init__(self, *args, **kwargs): @@ -107,17 +218,6 @@ class CreateRender(plugin.Creator): self._project_settings = get_project_settings( Session["AVALON_PROJECT"]) - # project_settings/maya/create/CreateRender/aov_separator - try: - self.aov_separator = self._aov_chars[( - self._project_settings["maya"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - self.aov_separator = "_" - try: default_servers = deadline_settings["deadline_urls"] project_servers = ( @@ -174,8 +274,8 @@ class CreateRender(plugin.Creator): ]) cmds.setAttr("{}.machineList".format(self.instance), lock=True) - self._rs = renderSetup.instance() - layers = self._rs.getRenderLayers() + rs = renderSetup.instance() + layers = rs.getRenderLayers() if use_selection: print(">>> processing existing layers") sets = [] @@ -190,7 +290,7 @@ class CreateRender(plugin.Creator): # if no render layers are present, create default one with # asterisk selector if not layers: - render_layer = self._rs.createRenderLayer('Main') + render_layer = rs.createRenderLayer('Main') collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') @@ -200,7 +300,7 @@ class CreateRender(plugin.Creator): if renderer.startswith('renderman'): renderer = 'renderman' - self._set_default_renderer_settings(renderer) + RenderSettings.apply_defaults(renderer) return self.instance def _deadline_webservice_changed(self): @@ -422,101 +522,3 @@ class CreateRender(plugin.Creator): if "verify" not in kwargs: kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) return requests.get(*args, **kwargs) - - def _set_default_renderer_settings(self, renderer): - """Set basic settings based on renderer. - - Args: - renderer (str): Renderer name. 
- - """ - prefix = self._image_prefixes[renderer] - prefix = prefix.replace("{aov_separator}", self.aov_separator) - cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") - - asset = get_asset() - - if renderer == "arnold": - # set format to exr - - cmds.setAttr( - "defaultArnoldDriver.ai_translator", "exr", type="string") - self._set_global_output_settings() - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) - - if renderer == "vray": - self._set_vray_settings(asset) - if renderer == "redshift": - _ = self._set_renderer_option( - "RedshiftOptions", "{}.imageFormat", 1 - ) - - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) - - self._set_global_output_settings() - - def _set_vray_settings(self, asset): - # type: (dict) -> None - """Sets important settings for Vray.""" - settings = cmds.ls(type="VRaySettingsNode") - node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - - # set separator - # set it in vray menu - if cmds.optionMenuGrp("vrayRenderElementSeparator", exists=True, - q=True): - items = cmds.optionMenuGrp( - "vrayRenderElementSeparator", ill=True, query=True) - - separators = [cmds.menuItem(i, label=True, query=True) for i in items] # noqa: E501 - try: - sep_idx = separators.index(self.aov_separator) - except ValueError: - raise CreatorError( - "AOV character {} not in {}".format( - self.aov_separator, separators)) - - cmds.optionMenuGrp( - "vrayRenderElementSeparator", sl=sep_idx + 1, edit=True) - cmds.setAttr( - "{}.fileNameRenderElementSeparator".format(node), - self.aov_separator, - type="string" - ) - # set format to exr - cmds.setAttr( - "{}.imageFormatStr".format(node), "exr", type="string") - - # animType - cmds.setAttr( - "{}.animType".format(node), 1) - - # resolution - cmds.setAttr( - "{}.width".format(node), - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "{}.height".format(node), - asset["data"].get("resolutionHeight")) - - @staticmethod - def _set_global_output_settings(): - # enable animation - cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) - cmds.setAttr("defaultRenderGlobals.animation", 1) - cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) - cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) From 542918135f88f2a38e60d9066d23402f9c11e9e2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 01:42:55 +0100 Subject: [PATCH 016/785] Move more logic to the RenderSettings class --- .../hosts/maya/plugins/create/create_render.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index ff230a0ff2..05af3f32a9 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -52,7 +52,14 @@ class RenderSettings(object): self._project_settings = project_settings @staticmethod - def apply_defaults(renderer, project_settings=None): + def apply_defaults(renderer=None, project_settings=None): + if renderer is None: + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() + # handle various renderman names + if renderer.startswith('renderman'): + renderer = 'renderman' + if project_settings is None: 
project_settings = get_project_settings(Session["AVALON_PROJECT"]) @@ -294,13 +301,7 @@ class CreateRender(plugin.Creator): collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') - renderer = cmds.getAttr( - 'defaultRenderGlobals.currentRenderer').lower() - # handle various renderman names - if renderer.startswith('renderman'): - renderer = 'renderman' - - RenderSettings.apply_defaults(renderer) + RenderSettings.apply_defaults() return self.instance def _deadline_webservice_changed(self): From 17be6f0f038d91c782b03d4fec3448e088ecfdf0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 01:48:28 +0100 Subject: [PATCH 017/785] Use log instead of print --- openpype/hosts/maya/plugins/create/create_render.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 05af3f32a9..fd37b8a709 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -284,10 +284,10 @@ class CreateRender(plugin.Creator): rs = renderSetup.instance() layers = rs.getRenderLayers() if use_selection: - print(">>> processing existing layers") + self.log.info("Processing existing layers") sets = [] for layer in layers: - print(" - creating set for {}:{}".format( + self.log.info(" - creating set for {}:{}".format( namespace, layer.name())) render_set = cmds.sets( n="{}:{}".format(namespace, layer.name())) @@ -301,6 +301,7 @@ class CreateRender(plugin.Creator): collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') + self.log.info("Applying default render settings..") RenderSettings.apply_defaults() return self.instance From 8b0f60eaaa5789a742b2db3feb556e6d4659be70 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 11:24:34 +0100 Subject: [PATCH 018/785] Collect the AOV separator for Render Products in Layer Data --- openpype/hosts/maya/api/lib_renderproducts.py | 45 +++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index e8e4b9aaef..db2c6c1fdc 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -97,6 +97,12 @@ class LayerMetadata(object): # Render Products products = attr.ib(init=False, default=attr.Factory(list)) + # The AOV separator token. Note that not all renderers define an explicit + # render separator but allow to put the AOV/RenderPass token anywhere in + # the file path prefix. For those renderers we'll fall back to whatever + # is between the last occurrences of and tokens. 
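    # Illustrative example (the exact token names are an assumption based on
    # typical Maya image file name prefixes): for a prefix such as
    #     maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>
    # the text between the last layer token and the last AOV/RenderPass token
    # is "_", which is what would be picked up as the separator here.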
+ aov_separator = attr.ib(default="_") + @attr.s class RenderProduct(object): @@ -180,7 +186,6 @@ class ARenderProducts: self.layer = layer self.render_instance = render_instance self.multipart = False - self.aov_separator = render_instance.data.get("aovSeparator", "_") # Initialize self.layer_data = self._get_layer_data() @@ -316,6 +321,31 @@ class ARenderProducts: # defaultRenderLayer renders as masterLayer layer_name = "masterLayer" + # AOV separator - default behavior extracts the part between + # last occurences of and + # todo: This code also triggers for V-Ray which overrides it explicitly + # so this code will invalidly debug log it couldn't extract the + # aov separator even though it does set it in RenderProductsVray + layer_tokens = ["", ""] + aov_tokens = ["", ""] + + def match_last(tokens, text): + """regex match the last occurence from a list of tokens""" + pattern = "(?:.*)({})".format("|".join(tokens)) + return re.search(pattern, text, re.IGNORECASE) + + layer_match = match_last(layer_tokens, file_prefix) + aov_match = match_last(aov_tokens, file_prefix) + kwargs = {} + if layer_match and aov_match: + matches = sorted((layer_match, aov_match), + key=lambda match: match.end(1)) + separator = file_prefix[matches[0].end(1):matches[1].start(1)] + kwargs["aov_separator"] = separator + else: + log.debug("Couldn't extract aov separator from " + "file prefix: {}".format(file_prefix)) + # todo: Support Custom Frames sequences 0,5-10,100-120 # Deadline allows submitting renders with a custom frame list # to support those cases we might want to allow 'custom frames' @@ -332,7 +362,8 @@ class ARenderProducts: layerName=layer_name, renderer=self.renderer, defaultExt=self._get_attr("defaultRenderGlobals.imfPluginKey"), - filePrefix=file_prefix + filePrefix=file_prefix, + **kwargs ) def _generate_file_sequence( @@ -677,9 +708,15 @@ class RenderProductsVray(ARenderProducts): """ prefix = super(RenderProductsVray, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.aov_separator) + aov_separator = self._get_aov_separator() + prefix = "{}{}".format(prefix, aov_separator) return prefix + def _get_aov_separator(self): + return self._get_attr( + "vraySettings.fileNameRenderElementSeparator" + ) + def _get_layer_data(self): # type: () -> LayerMetadata """Override to get vray specific extension.""" @@ -691,6 +728,8 @@ class RenderProductsVray(ARenderProducts): layer_data.defaultExt = default_ext layer_data.padding = self._get_attr("vraySettings.fileNamePadding") + layer_data.aov_separator = self._get_aov_separator() + return layer_data def get_render_products(self): From 0516bb0f6f5fa43be7a087dd1664bc1b8bb75425 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 11:39:17 +0100 Subject: [PATCH 019/785] Fix Redshift appending . even when or was explicitly set. 
---
 openpype/hosts/maya/api/lib_renderproducts.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py
index db2c6c1fdc..49b6d6f0da 100644
--- a/openpype/hosts/maya/api/lib_renderproducts.py
+++ b/openpype/hosts/maya/api/lib_renderproducts.py
@@ -80,6 +80,13 @@ IMAGE_PREFIXES = {
 }
 
 
+def has_tokens(string, tokens):
+    """Return whether any of tokens is in input string (case-insensitive)"""
+    pattern = "({})".format("|".join(re.escape(token) for token in tokens))
+    match = re.search(pattern, string, re.IGNORECASE)
+    return bool(match)
+
+
 @attr.s
 class LayerMetadata(object):
     """Data class for Render Layer metadata."""
@@ -950,7 +957,11 @@ class RenderProductsRedshift(ARenderProducts):
 
         """
         prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
-        prefix = "{}.".format(prefix)
+
+        # Only append . if no or is specified
+        if not has_tokens(prefix, ["", ""]):
+            prefix = "{}.".format(prefix)
+
         return prefix
 
     def get_render_products(self):

From f8e8ce5c61d485b753595f4e2dc9e0e20f1321c3 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 3 Feb 2022 11:39:43 +0100
Subject: [PATCH 020/785] Add docstring

---
 openpype/hosts/maya/api/lib_renderproducts.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py
index 49b6d6f0da..0e1b553619 100644
--- a/openpype/hosts/maya/api/lib_renderproducts.py
+++ b/openpype/hosts/maya/api/lib_renderproducts.py
@@ -720,6 +720,8 @@ class RenderProductsVray(ARenderProducts):
         return prefix
 
     def _get_aov_separator(self):
+        # type: () -> str
+        """Return the V-Ray AOV/Render Elements separator"""
         return self._get_attr(
             "vraySettings.fileNameRenderElementSeparator"
         )

From 862167fc6aa6bec3793b120eac8f84cd901a5035 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 3 Feb 2022 14:23:48 +0100
Subject: [PATCH 021/785] Fix types

---
 openpype/hosts/maya/plugins/create/create_render.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py
index fd37b8a709..f87e1eac5d 100644
--- a/openpype/hosts/maya/plugins/create/create_render.py
+++ b/openpype/hosts/maya/plugins/create/create_render.py
@@ -118,7 +118,7 @@ class CreateRender(plugin.Creator):
         self._set_global_output_settings()
 
     def _set_vray_settings(self, aov_separator, width, height):
-        # type: (dict) -> None
+        # type: (str, int, int) -> None
         """Sets important settings for Vray."""
         settings = cmds.ls(type="VRaySettingsNode")
         node = settings[0] if settings else cmds.createNode("VRaySettingsNode")

From ee3a3632731a5afbb3c7355f339f1818990a04ea Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 3 Feb 2022 16:03:12 +0100
Subject: [PATCH 022/785] Move RenderSettings into its own api

---
 openpype/hosts/maya/api/render_settings.py    | 165 +++++++++++++++++
 .../maya/plugins/create/create_render.py      | 173 ++----------------
 .../publish/validate_render_single_camera.py  |  18 +-
 3 files changed, 186 insertions(+), 170 deletions(-)
 create mode 100644 openpype/hosts/maya/api/render_settings.py

diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py
new file mode 100644
index 0000000000..14f6468d1b
--- /dev/null
+++ b/openpype/hosts/maya/api/render_settings.py
@@ -0,0 +1,165 @@
+import os
+import sys
+
+from maya import cmds
+import maya.app.renderSetup.model.renderSetup as
renderSetup + +from openpype.hosts.maya.api import ( + lib, + plugin +) +from openpype.api import ( + get_system_settings, + get_project_settings, + get_asset) +from openpype.modules import ModulesManager + +from avalon.api import Session +from avalon.api import CreatorError + + +class RenderSettings(object): + + _image_prefix_nodes = { + 'mentalray': 'defaultRenderGlobals.imageFilePrefix', + 'vray': 'vraySettings.fileNamePrefix', + 'arnold': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'defaultRenderGlobals.imageFilePrefix', + 'redshift': 'defaultRenderGlobals.imageFilePrefix' + } + + _image_prefixes = { + 'mentalray': 'maya///{aov_separator}', # noqa + 'vray': 'maya///', + 'arnold': 'maya///{aov_separator}', # noqa + 'renderman': 'maya///{aov_separator}', + 'redshift': 'maya///{aov_separator}' # noqa + } + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + @classmethod + def get_image_prefix_attr(cls, renderer): + return cls._image_prefix_nodes[renderer] + + def __init__(self, project_settings): + self._project_settings = project_settings + + @staticmethod + def apply_defaults(renderer=None, project_settings=None): + if renderer is None: + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() + # handle various renderman names + if renderer.startswith('renderman'): + renderer = 'renderman' + + if project_settings is None: + project_settings = get_project_settings(Session["AVALON_PROJECT"]) + + render_settings = RenderSettings(project_settings) + render_settings.set_default_renderer_settings(renderer) + + def set_default_renderer_settings(self, renderer): + """Set basic settings based on renderer. + + Args: + renderer (str): Renderer name. + + """ + # project_settings/maya/create/CreateRender/aov_separator + try: + aov_separator = self._aov_chars[( + self._project_settings["maya"] + ["create"] + ["CreateRender"] + ["aov_separator"] + )] + except KeyError: + aov_separator = "_" + + prefix = self._image_prefixes[renderer] + prefix = prefix.replace("{aov_separator}", aov_separator) + cmds.setAttr(self._image_prefix_nodes[renderer], + prefix, + type="string") + + asset = get_asset() + width = asset["data"].get("resolutionWidth") + height = asset["data"].get("resolutionHeight") + + if renderer == "arnold": + # set format to exr + cmds.setAttr( + "defaultArnoldDriver.ai_translator", "exr", type="string") + self._set_global_output_settings() + + # resolution + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + if renderer == "vray": + self._set_vray_settings(aov_separator, width, height) + + if renderer == "redshift": + # set format to exr + cmds.setAttr("RedshiftOptions.imageFormat", 1) + + # resolution + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + self._set_global_output_settings() + + def _set_vray_settings(self, aov_separator, width, height): + # type: (str, int, int) -> None + """Sets important settings for Vray.""" + settings = cmds.ls(type="VRaySettingsNode") + node = settings[0] if settings else cmds.createNode("VRaySettingsNode") + + # Set aov separator + # First we need to explicitly set the UI items in Render Settings + # because that is also what V-Ray updates to when that Render Settings + # UI did initialize before and refreshes again. 
+ MENU = "vrayRenderElementSeparator" + if cmds.optionMenuGrp(MENU, query=True, exists=True): + items = cmds.optionMenuGrp(MENU, query=True, ill=True) + separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 + try: + sep_idx = separators.index(aov_separator) + except ValueError: + raise CreatorError( + "AOV character {} not in {}".format( + aov_separator, separators)) + + cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) + + # Set the render element attribute as string. This is also what V-Ray + # sets whenever the `vrayRenderElementSeparator` menu items switch + cmds.setAttr( + "{}.fileNameRenderElementSeparator".format(node), + aov_separator, + type="string" + ) + + # set format to exr + cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string") + + # animType + cmds.setAttr("{}.animType".format(node), 1) + + # resolution + cmds.setAttr("{}.width".format(node), width) + cmds.setAttr("{}.height".format(node), height) + + @staticmethod + def _set_global_output_settings(): + # enable animation + cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) + cmds.setAttr("defaultRenderGlobals.animation", 1) + cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) + cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) \ No newline at end of file diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index f87e1eac5d..b75105736b 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -1,170 +1,27 @@ # -*- coding: utf-8 -*- """Create ``Render`` instance in Maya.""" -import os import json +import os +import sys + import appdirs import requests import six -import sys from maya import cmds -import maya.app.renderSetup.model.renderSetup as renderSetup - -from openpype.hosts.maya.api import ( - lib, - plugin -) -from openpype.api import ( - get_system_settings, - get_project_settings, - get_asset) -from openpype.modules import ModulesManager +from maya.app.renderSetup.model import renderSetup from avalon.api import Session -from avalon.api import CreatorError - - -class RenderSettings(object): - - _image_prefix_nodes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', - 'vray': 'vraySettings.fileNamePrefix', - 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' - } - - _image_prefixes = { - 'mentalray': 'maya///{aov_separator}', # noqa - 'vray': 'maya///', - 'arnold': 'maya///{aov_separator}', # noqa - 'renderman': 'maya///{aov_separator}', - 'redshift': 'maya///{aov_separator}' # noqa - } - - _aov_chars = { - "dot": ".", - "dash": "-", - "underscore": "_" - } - - def __init__(self, project_settings): - self._project_settings = project_settings - - @staticmethod - def apply_defaults(renderer=None, project_settings=None): - if renderer is None: - renderer = cmds.getAttr( - 'defaultRenderGlobals.currentRenderer').lower() - # handle various renderman names - if renderer.startswith('renderman'): - renderer = 'renderman' - - if project_settings is None: - project_settings = get_project_settings(Session["AVALON_PROJECT"]) - - render_settings = RenderSettings(project_settings) - render_settings.set_default_renderer_settings(renderer) - - def set_default_renderer_settings(self, renderer): - """Set basic settings based on renderer. - - Args: - renderer (str): Renderer name. 
- - """ - # project_settings/maya/create/CreateRender/aov_separator - try: - aov_separator = self._aov_chars[( - self._project_settings["maya"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - aov_separator = "_" - - prefix = self._image_prefixes[renderer] - prefix = prefix.replace("{aov_separator}", aov_separator) - cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") - - asset = get_asset() - width = asset["data"].get("resolutionWidth") - height = asset["data"].get("resolutionHeight") - - if renderer == "arnold": - # set format to exr - cmds.setAttr( - "defaultArnoldDriver.ai_translator", "exr", type="string") - self._set_global_output_settings() - - # resolution - cmds.setAttr("defaultResolution.width", width) - cmds.setAttr("defaultResolution.height", height) - - if renderer == "vray": - self._set_vray_settings(aov_separator, width, height) - - if renderer == "redshift": - # set format to exr - cmds.setAttr("RedshiftOptions.imageFormat", 1) - - # resolution - cmds.setAttr("defaultResolution.width", width) - cmds.setAttr("defaultResolution.height", height) - - self._set_global_output_settings() - - def _set_vray_settings(self, aov_separator, width, height): - # type: (str, int, int) -> None - """Sets important settings for Vray.""" - settings = cmds.ls(type="VRaySettingsNode") - node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - - # Set aov separator - # First we need to explicitly set the UI items in Render Settings - # because that is also what V-Ray updates to when that Render Settings - # UI did initialize before and refreshes again. - MENU = "vrayRenderElementSeparator" - if cmds.optionMenuGrp(MENU, query=True, exists=True): - items = cmds.optionMenuGrp(MENU, query=True, ill=True) - separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 - try: - sep_idx = separators.index(aov_separator) - except ValueError: - raise CreatorError( - "AOV character {} not in {}".format( - aov_separator, separators)) - - cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) - - # Set the render element attribute as string. 
This is also what V-Ray - # sets whenever the `vrayRenderElementSeparator` menu items switch - cmds.setAttr( - "{}.fileNameRenderElementSeparator".format(node), - aov_separator, - type="string" - ) - - # set format to exr - cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string") - - # animType - cmds.setAttr("{}.animType".format(node), 1) - - # resolution - cmds.setAttr("{}.width".format(node), width) - cmds.setAttr("{}.height".format(node), height) - - @staticmethod - def _set_global_output_settings(): - # enable animation - cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) - cmds.setAttr("defaultRenderGlobals.animation", 1) - cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) - cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) +from openpype.api import ( + get_system_settings, + get_project_settings +) +from openpype.hosts.maya.api import ( + lib, + plugin, + render_settings +) +from openpype.modules import ModulesManager class CreateRender(plugin.Creator): @@ -302,7 +159,7 @@ class CreateRender(plugin.Creator): collection.getSelector().setPattern('*') self.log.info("Applying default render settings..") - RenderSettings.apply_defaults() + render_settings.RenderSettings.apply_defaults() return self.instance def _deadline_webservice_changed(self): diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 0838b4fbf8..3f08e0cd62 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -1,19 +1,11 @@ import re import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action - from maya import cmds - -ImagePrefixes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', - 'vray': 'vraySettings.fileNamePrefix', - 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' -} +import openpype.api +import openpype.hosts.maya.api.action +from openpype.hosts.maya.api.render_settings import RenderSettings class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): @@ -46,7 +38,9 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): # handle various renderman names if renderer.startswith('renderman'): renderer = 'renderman' - file_prefix = cmds.getAttr(ImagePrefixes[renderer]) + + attr = RenderSettings.get_image_prefix_attr(renderer) + file_prefix = cmds.getAttr(attr) if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): From fc0891c7f0617ffde70f78456051e59b2d04e400 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 3 Feb 2022 16:10:21 +0100 Subject: [PATCH 023/785] Cleanup render_settings.py imports + newline end of file --- openpype/hosts/maya/api/render_settings.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 14f6468d1b..48bf7fa56c 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -1,18 +1,8 @@ -import os -import sys - from maya import cmds -import maya.app.renderSetup.model.renderSetup as renderSetup -from openpype.hosts.maya.api import ( - lib, - plugin -) from openpype.api import ( - get_system_settings, get_project_settings, get_asset) -from openpype.modules import ModulesManager from avalon.api import Session from avalon.api import 
CreatorError @@ -162,4 +152,4 @@ class RenderSettings(object): cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) cmds.setAttr("defaultRenderGlobals.animation", 1) cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) - cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) \ No newline at end of file + cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) From d88ed919e6a4a566b8ff8b289415c471de454b00 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 16 Mar 2022 22:09:23 +0100 Subject: [PATCH 024/785] First draft pass of refactoring the Integrator --- openpype/plugins/publish/integrate_new.py | 1076 ++++++++++----------- 1 file changed, 508 insertions(+), 568 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e8dab089af..e4986e3b3f 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -7,9 +7,8 @@ import clique import errno import six import re -import shutil -from pymongo import DeleteOne, InsertOne +from pymongo import DeleteOne, InsertOne, UpdateOne import pyblish.api from avalon import io from avalon.api import format_template_with_optional_keys @@ -31,6 +30,17 @@ else: log = logging.getLogger(__name__) +def get_frame_padded(frame, padding): + """Return frame number as string with `padding` amount of padded zeros""" + return "{frame:0{padding}d}".format(padding=padding, frame=frame) + + +def get_first_frame_padded(collection): + """Return first frame as padded number from `clique.Collection`""" + start_frame = next(iter(collection.indexes)) + return get_frame_padded(start_frame, padding=collection.padding) + + class IntegrateAssetNew(pyblish.api.InstancePlugin): """Resolve any dependency issues @@ -108,7 +118,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["clip"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "task", "username", "frame", "udim" ] default_template_name = "publish" @@ -116,38 +126,40 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): TMP_FILE_EXT = 'tmp' # file_url : file_size of all published and uploaded files - integrated_file_sizes = {} + destinations = list() # Attributes set by settings template_name_profiles = None subset_grouping_profiles = None def process(self, instance): - self.integrated_file_sizes = {} - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: + self.destinations = [] + + # Exclude instances that also contain families from exclude families + families = set( + # Consider family and families data + [instance.data["family"]] + instance.data.get("families", []) + ) + if families & set(self.exclude_families): return try: self.register(instance) self.log.info("Integrated Asset in to the database ...") - self.log.info("instance.data: {}".format(instance.data)) - self.handle_destination_files(self.integrated_file_sizes, + self.handle_destination_files(self.destinations, 'finalize') except Exception: # clean destination self.log.critical("Error when registering", exc_info=True) - self.handle_destination_files(self.integrated_file_sizes, 'remove') + self.handle_destination_files(self.destinations, 'remove') six.reraise(*sys.exc_info()) - def register(self, instance): - # Required environment variables - anatomy_data = instance.data["anatomyData"] - - io.install() + def prepare_anatomy(self, instance): + """Prepare anatomy data used to 
define representation destinations""" context = instance.context + anatomy_data = instance.data["anatomyData"] project_entity = instance.data["projectEntity"] context_asset_name = None @@ -206,8 +218,36 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Fill family in anatomy data anatomy_data["family"] = instance.data.get("family") - stagingdir = instance.data.get("stagingDir") - if not stagingdir: + intent_value = instance.context.data.get("intent") + if intent_value and isinstance(intent_value, dict): + intent_value = intent_value.get("value") + + if intent_value: + anatomy_data["intent"] = intent_value + + # Get profile + key_values = { + "families": self.main_family_from_instance(instance), + "tasks": task_name, + "hosts": instance.context.data["hostName"], + "task_types": task_type + } + profile = filter_profiles( + self.template_name_profiles, + key_values, + logger=self.log + ) + + template_name = "publish" + if profile: + template_name = profile["template_name"] + + return template_name, anatomy_data + + def register(self, instance): + + instance_stagingdir = instance.data.get("stagingDir") + if not instance_stagingdir: self.log.info(( "{0} is missing reference to staging directory." " Will try to get it from representation." @@ -215,7 +255,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): else: self.log.debug( - "Establishing staging directory @ {0}".format(stagingdir) + "Establishing staging directory " + "@ {0}".format(instance_stagingdir) ) # Ensure at least one file is set up for transfer in staging dir. @@ -227,28 +268,74 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) ) - subset = self.get_subset(asset_entity, instance) - instance.data["subsetEntity"] = subset + subset = self.register_subset(instance) + + version = self.register_version(instance, subset) + instance.data["versionEntity"] = version + instance.data['version'] = version['name'] + + existing_repres = list(io.find({ + "parent": version["_id"], + "type": "archived_representation" + })) + + # Find the representations to transfer amongst the files + # Each should be a single representation (as such, a single extension) + template_name, anatomy_data = self.prepare_anatomy(instance) + published_representations = {} + representations = [] + for repre in instance.data["representations"]: + + if "delete" in repre.get("tags", []): + self.log.debug("Skipping representation marked for deletion: " + "{}".format(repre)) + continue + + prepared = self.prepare_representation(repre, + anatomy_data, + template_name, + existing_repres, + version, + instance_stagingdir, + instance) + + # todo: simplify this? 
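            # `prepared` is expected to be a mapping with at least the keys
            # "representation", "anatomy_data" and "published_files", matching
            # what prepare_representation() returns further below.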
+ representation = prepared["representation"] + representations.append(representation) + published_representations[representation["_id"]] = prepared + + # Remove old representations if there are any (before insertion of new) + if existing_repres: + repre_ids_to_remove = [repre["_id"] for repre in existing_repres] + io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + + # Write the new representations to the database + io.insert_many(representations) + + instance.data["published_representations"] = published_representations + + self.log.info("Registered {} representations" + "".format(len(representations))) + + def register_version(self, instance, subset): version_number = instance.data["version"] self.log.debug("Next version: v{}".format(version_number)) - version_data = self.create_version_data(context, instance) - + version_data = self.create_version_data(instance) version_data_instance = instance.data.get('versionData') if version_data_instance: version_data.update(version_data_instance) - # TODO rename method from `create_version` to - # `prepare_version` or similar... - version = self.create_version( - subset=subset, - version_number=version_number, - data=version_data - ) - - self.log.debug("Creating version ...") + version = { + "schema": "openpype:version-3.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "data": version_data + } + repres = instance.data.get("representations", []) new_repre_names_low = [_repre["name"].lower() for _repre in repres] existing_version = io.find_one({ @@ -258,29 +345,28 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): }) if existing_version is None: + self.log.debug("Creating new version ...") version_id = io.insert_one(version).inserted_id else: + self.log.debug("Updating existing version ...") # Check if instance have set `append` mode which cause that # only replicated representations are set to archive append_repres = instance.data.get("append", False) + bulk_writes = [] # Update version data - # TODO query by _id and - io.update_many({ - 'type': 'version', - 'parent': subset["_id"], - 'name': version_number + version_id = existing_version['_id'] + bulk_writes.append(UpdateOne({ + '_id': version_id }, { '$set': version - }) - version_id = existing_version['_id'] + })) # Find representations of existing version and archive them - current_repres = list(io.find({ + current_repres = io.find({ "type": "representation", "parent": version_id - })) - bulk_writes = [] + }) for repre in current_repres: if append_repres: # archive only duplicated representations @@ -304,346 +390,248 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) version = io.find_one({"_id": version_id}) - instance.data["versionEntity"] = version + return version - existing_repres = list(io.find({ - "parent": version_id, - "type": "archived_representation" - })) + def prepare_representation(self, repre, + anatomy_data, + template_name, + existing_repres, + version, + instance_stagingdir, + instance): - instance.data['version'] = version['name'] + # create template data for Anatomy + template_data = copy.deepcopy(anatomy_data) - intent_value = instance.context.data.get("intent") - if intent_value and isinstance(intent_value, dict): - intent_value = intent_value.get("value") + # pre-flight validations + if repre["ext"].startswith("."): + raise ValueError("Extension must not start with a dot '.': " + "{}".format(repre["ext"])) - if intent_value: - anatomy_data["intent"] = intent_value + if repre.get("transfers"): + raise 
ValueError("Representation is not allowed to have transfers" + "data before integration. " + "Got: {}".format(repre["transfers"])) - anatomy = instance.context.data['anatomy'] + # required representation keys + files = repre['files'] + template_data["representation"] = repre["name"] + template_data["ext"] = repre["ext"] - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] + # optionals + # retrieve additional anatomy data from representation if exists + for representation_key, anatomy_key in { + # Representation Key: Anatomy data key + "resolutionWidth": "resolution_width", + "resolutionHeight": "resolution_height", + "fps": "fps", + "outputName": "output", + }.items(): + value = repre.get(representation_key) + if value: + template_data[anatomy_key] = value - orig_transfers = [] - if 'transfers' not in instance.data: - instance.data['transfers'] = [] + if repre.get('stagingDir'): + stagingdir = repre['stagingDir'] else: - orig_transfers = list(instance.data['transfers']) + # Fall back to instance staging dir if not explicitly + # set for representation in the instance + self.log.debug("Representation uses instance staging dir: " + "{}".format(instance_stagingdir)) + stagingdir = instance_stagingdir - family = self.main_family_from_instance(instance) + self.log.debug("Anatomy template name: {}".format(template_name)) + anatomy = instance.context.data['anatomy'] + template = os.path.normpath( + anatomy.templates[template_name]["path"]) - key_values = { - "families": family, - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, - logger=self.log - ) + is_sequence_representation = isinstance(files, (list, tuple)) + if is_sequence_representation: + # Collection of files (sequence) + # Get the sequence as a collection. The files must be of a single + # sequence and have no remainder outside of the collections. 
+ collections, remainder = clique.assemble(files, + minimum_items=1) + if not collections: + raise ValueError("No collections found in files: " + "{}".format(files)) + if remainder: + raise ValueError("Files found not detected as part" + " of a sequence: {}".format(remainder)) + if len(collections) > 1: + raise ValueError("Files in sequence are not part of a" + " single sequence collection: " + "{}".format(collections)) + src_collection = collections[0] - template_name = "publish" - if profile: - template_name = profile["template_name"] - - published_representations = {} - for idx, repre in enumerate(instance.data["representations"]): - # reset transfers for next representation - # instance.data['transfers'] is used as a global variable - # in current codebase - instance.data['transfers'] = list(orig_transfers) - - if "delete" in repre.get("tags", []): - continue - - published_files = [] - - # create template data for Anatomy - template_data = copy.deepcopy(anatomy_data) - if intent_value is not None: - template_data["intent"] = intent_value - - resolution_width = repre.get("resolutionWidth") - resolution_height = repre.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - template_data["fps"] = fps - - files = repre['files'] - if repre.get('stagingDir'): - stagingdir = repre['stagingDir'] - - if repre.get("outputName"): - template_data["output"] = repre['outputName'] - - template_data["representation"] = repre["name"] - - ext = repre["ext"] - if ext.startswith("."): - self.log.warning(( - "Implementaion warning: <\"{}\">" - " Representation's extension stored under \"ext\" key " - " started with dot (\"{}\")." 
- ).format(repre["name"], ext)) - ext = ext[1:] - repre["ext"] = ext - template_data["ext"] = ext - - self.log.info(template_name) - template = os.path.normpath( - anatomy.templates[template_name]["path"]) - - sequence_repre = isinstance(files, list) - repre_context = None - if sequence_repre: - self.log.debug( - "files: {}".format(files)) - src_collections, remainder = clique.assemble(files) - self.log.debug( - "src_tail_collections: {}".format(str(src_collections))) - src_collection = src_collections[0] - - # Assert that each member has identical suffix - src_head = src_collection.format("{head}") - src_tail = src_collection.format("{tail}") - - # fix dst_padding - valid_files = [x for x in files if src_collection.match(x)] - padd_len = len( - valid_files[0].replace(src_head, "").replace(src_tail, "") - ) - src_padding_exp = "%0{}d".format(padd_len) - - test_dest_files = list() - for i in [1, 2]: - template_data["representation"] = repre['ext'] - if not repre.get("udim"): - template_data["frame"] = src_padding_exp % i - else: - template_data["udim"] = src_padding_exp % i - - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled[template_name]["path"] - if repre_context is None: - repre_context = template_filled.used_values - test_dest_files.append( - os.path.normpath(template_filled) - ) - if not repre.get("udim"): - template_data["frame"] = repre_context["frame"] - else: - template_data["udim"] = repre_context["udim"] - - self.log.debug( - "test_dest_files: {}".format(str(test_dest_files))) - - dst_collections, remainder = clique.assemble(test_dest_files) - dst_collection = dst_collections[0] - dst_head = dst_collection.format("{head}") - dst_tail = dst_collection.format("{tail}") - - index_frame_start = None + # If the representation has `frameStart` set it renumbers the + # frame indices of the published collection. It will start from + # that `frameStart` index instead. Thus if that frame start + # differs from the collection we want to shift the destination + # frame indices from the source collection. + destination_indexes = list(src_collection.indexes) + destination_padding = len(get_first_frame_padded(src_collection)) + if repre.get("frameStart") is not None: + index_frame_start = int(repre.get("frameStart")) # TODO use frame padding from right template group - if repre.get("frameStart") is not None: - frame_start_padding = int( - anatomy.templates["render"].get( - "frame_padding", - anatomy.templates["render"].get("padding") - ) + render_template = anatomy.templates["render"] + frame_start_padding = int( + render_template.get( + "frame_padding", + render_template.get("padding") ) - - index_frame_start = int(repre.get("frameStart")) - - # exception for slate workflow - if index_frame_start and "slate" in instance.data["families"]: - index_frame_start -= 1 - - dst_padding_exp = src_padding_exp - dst_start_frame = None - collection_start = list(src_collection.indexes)[0] - for i in src_collection.indexes: - # TODO 1.) do not count padding in each index iteration - # 2.) 
do not count dst_padding from src_padding before - # index_frame_start check - frame_number = i - collection_start - src_padding = src_padding_exp % i - - src_file_name = "{0}{1}{2}".format( - src_head, src_padding, src_tail) - - dst_padding = src_padding_exp % frame_number - - if index_frame_start is not None: - dst_padding_exp = "%0{}d".format(frame_start_padding) - dst_padding = dst_padding_exp % (index_frame_start + frame_number) # noqa: E501 - elif repre.get("udim"): - dst_padding = int(i) - - dst = "{0}{1}{2}".format( - dst_head, - dst_padding, - dst_tail - ) - - self.log.debug("destination: `{}`".format(dst)) - src = os.path.join(stagingdir, src_file_name) - - self.log.debug("source: {}".format(src)) - instance.data["transfers"].append([src, dst]) - - published_files.append(dst) - - # for adding first frame into db - if not dst_start_frame: - dst_start_frame = dst_padding - - # Store used frame value to template data - if repre.get("frame"): - template_data["frame"] = dst_start_frame - - dst = "{0}{1}{2}".format( - dst_head, - dst_start_frame, - dst_tail - ) - repre['published_path'] = dst - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - template_data.pop("frame", None) - fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" ) - template_data["representation"] = repre['ext'] - # Store used frame value to template data - if repre.get("udim"): - template_data["udim"] = repre["udim"][0] - src = os.path.join(stagingdir, fname) - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled[template_name]["path"] - repre_context = template_filled.used_values - dst = os.path.normpath(template_filled) - - instance.data["transfers"].append([src, dst]) - - published_files.append(dst) - repre['published_path'] = dst - self.log.debug("__ dst: {}".format(dst)) + # Shift destination sequence to the start frame + src_start_frame = next(iter(src_collection.indexes)) + shift = index_frame_start - src_start_frame + if shift: + destination_indexes = [ + frame + shift for frame in destination_indexes + ] + destination_padding = frame_start_padding + # To construct the destination template with anatomy we require + # a Frame or UDIM tile set for the template data. We use the first + # index of the destination for that because that could've shifted + # from the source indexes, etc. 
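            # Worked example of the shift computed above (illustrative
            # numbers): a source collection spanning frames 1001-1010 with
            # repre["frameStart"] = 1 gives shift = 1 - 1001 = -1000, so the
            # destination_indexes become 1-10 and are padded using the render
            # template's frame padding (e.g. "0001").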
+ first_index_padded = get_frame_padded(frame=destination_indexes[0], + padding=destination_padding) if repre.get("udim"): - repre_context["udim"] = repre.get("udim") # store list + # UDIM representations handle ranges in a different manner + template_data["udim"] = first_index_padded + else: + template_data["frame"] = first_index_padded - repre["publishedFiles"] = published_files + # Construct destination collection from template + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + self.log.debug("Template filled: {}".format(str(template_filled))) + dst_collections, _remainder = clique.assemble( + [os.path.normpath(template_filled)], minimum_items=1 + ) + assert not _remainder, "This is a bug" + assert len(dst_collections) == 1, "This is a bug" + dst_collection = dst_collections[0] - for key in self.db_representation_context_keys: - value = template_data.get(key) - if not value: - continue - repre_context[key] = template_data[key] + # Update the destination indexes and padding + dst_collection.indexes = destination_indexes + dst_collection.padding = destination_padding + assert len(src_collection) == len(dst_collection), "This is a bug" - # Use previous representation's id if there are any - repre_id = None - repre_name_low = repre["name"].lower() - for _repre in existing_repres: - # NOTE should we check lowered names? - if repre_name_low == _repre["name"]: - repre_id = _repre["orig_id"] - break + transfers = [] + for src_file_name, dst in zip(src_collection, dst_collection): + src = os.path.join(stagingdir, src_file_name) + self.log.debug("source: {}".format(src)) + self.log.debug("destination: `{}`".format(dst)) + transfers.append(src, dst) - # Create new id if existing representations does not match - if repre_id is None: - repre_id = io.ObjectId() + # Store first frame as published path + # todo: remove `published_path` since it can be retrieved from + # `transfers` by taking the first destination transfers[0][1] + repre['published_path'] = next(iter(dst_collection)) + repre["transfers"].extend(transfers) - data = repre.get("data") or {} - data.update({'path': dst, 'template': template}) - representation = { - "_id": repre_id, - "schema": "openpype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": repre['name'], - "data": data, - "dependencies": instance.data.get("dependencies", "").split(), + else: + # Single file + template_data.pop("frame", None) + fname = files + assert not os.path.isabs(fname), ( + "Given file name is a full path" + ) + # Store used frame value to template data + if repre.get("udim"): + template_data["udim"] = repre["udim"][0] + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + dst = os.path.normpath(template_filled) - # Imprint shortcut to context - # for performance reasons. 
- "context": repre_context - } + # Single file transfer + self.log.debug("source: {}".format(src)) + self.log.debug("destination: `{}`".format(dst)) + repre["transfers"] = [src, dst] - if repre.get("outputName"): - representation["context"]["output"] = repre['outputName'] + repre['published_path'] = dst - if sequence_repre and repre.get("frameStart") is not None: - representation['context']['frame'] = ( - dst_padding_exp % int(repre.get("frameStart")) - ) + if repre.get("udim"): + repre_context["udim"] = repre.get("udim") # store list - # any file that should be physically copied is expected in - # 'transfers' or 'hardlinks' - if instance.data.get('transfers', False) or \ - instance.data.get('hardlinks', False): - # could throw exception, will be caught in 'process' - # all integration to DB is being done together lower, - # so no rollback needed - self.log.debug("Integrating source files to destination ...") - self.integrated_file_sizes.update(self.integrate(instance)) - self.log.debug("Integrated files {}". - format(self.integrated_file_sizes)) + for key in self.db_representation_context_keys: + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] - # get 'files' info for representation and all attached resources - self.log.debug("Preparing files information ...") - representation["files"] = self.get_files_info( - instance, - self.integrated_file_sizes) + # Use previous representation's id if there are any + repre_id = None + repre_name_lower = repre["name"].lower() + for _existing_repre in existing_repres: + # NOTE should we check lowered names? + if repre_name_lower == _existing_repre["name"].lower(): + repre_id = _existing_repre["orig_id"] + break - self.log.debug("__ representation: {}".format(representation)) - destination_list.append(dst) - self.log.debug("__ destination_list: {}".format(destination_list)) - instance.data['destination_list'] = destination_list - representations.append(representation) - published_representations[repre_id] = { - "representation": representation, - "anatomy_data": template_data, - "published_files": published_files - } - self.log.debug("__ representations: {}".format(representations)) + # Create new id if existing representations does not match + if repre_id is None: + repre_id = io.ObjectId() - # Remove old representations if there are any (before insertion of new) - if existing_repres: - repre_ids_to_remove = [] - for repre in existing_repres: - repre_ids_to_remove.append(repre["_id"]) - io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + # todo: `repre` is not the actual `representation` entity + # we should simplify/clarify difference between data above + # and the actual representation entity for the database + data = repre.get("data") or {} + data.update({'path': dst, 'template': template}) + representation = { + "_id": repre_id, + "schema": "openpype:representation-2.0", + "type": "representation", + "parent": version["_id"], + "name": repre['name'], + "data": data, + "dependencies": instance.data.get("dependencies", "").split(), - for rep in instance.data["representations"]: - self.log.debug("__ rep: {}".format(rep)) + # Imprint shortcut to context for performance reasons. 
+ "context": repre_context + } - io.insert_many(representations) - instance.data["published_representations"] = ( - published_representations + if repre.get("outputName"): + representation["context"]["output"] = repre['outputName'] + + if is_sequence_representation and repre.get("frameStart") is not None: + representation['context']['frame'] = template_data["frame"] + + # any file that should be physically copied is expected in + # 'transfers' or 'hardlinks' + integrated_files = [] + if instance.data.get('transfers', False) or \ + instance.data.get('hardlinks', False): + # could throw exception, will be caught in 'process' + # all integration to DB is being done together lower, + # so no rollback needed + # todo: separate the actual integrating of the files onto its own + # taking just a list of transfers as inputs (potentially + # with copy mode flag, like hardlink/copy, etc.) + self.log.debug("Integrating source files to destination ...") + integrated_files = self.integrate(instance) + self.log.debug("Integrated files {}".format(integrated_files)) + + # get 'files' info for representation and all attached resources + self.log.debug("Preparing files information ...") + representation["files"] = self.get_files_info( + instance, + integrated_files ) - # self.log.debug("Representation: {}".format(representations)) - self.log.info("Registered {} items".format(len(representations))) + + return { + "representation": representation, + "anatomy_data": template_data, + # todo: avoid the need for 'published_files'? + # backwards compatibility + "published_files": [transfer[1] for transfer in repre["transfers"]] + } def integrate(self, instance): """ Move the files. @@ -653,92 +641,93 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Args: instance: the instance to integrate Returns: - integrated_file_sizes: dictionary of destination file url and - its size in bytes + list: destination full paths of integrated files """ - # store destination url and size for reporting and rollback - integrated_file_sizes = {} + # store destinations for potential rollback and measuring sizes + destinations = [] transfers = list(instance.data.get("transfers", list())) for src, dest in transfers: - if os.path.normpath(src) != os.path.normpath(dest): + src = os.path.normpath(src) + dest = os.path.normpath(dest) + if src != dest: dest = self.get_dest_temp_url(dest) self.copy_file(src, dest) - # TODO needs to be updated during site implementation - integrated_file_sizes[dest] = os.path.getsize(dest) + destinations.append(dest) # Produce hardlinked copies - # Note: hardlink can only be produced between two files on the same - # server/disk and editing one of the two will edit both files at once. - # As such it is recommended to only make hardlinks between static files - # to ensure publishes remain safe and non-edited. hardlinks = instance.data.get("hardlinks", list()) for src, dest in hardlinks: dest = self.get_dest_temp_url(dest) - self.log.debug("Hardlinking file ... 
{} -> {}".format(src, dest)) if not os.path.exists(dest): self.hardlink_file(src, dest) - # TODO needs to be updated during site implementation - integrated_file_sizes[dest] = os.path.getsize(dest) + destinations.append(dest) - return integrated_file_sizes + return destinations + + def _create_folder_for_file(self, path): + dirname = os.path.dirname(path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) def copy_file(self, src, dst): - """ Copy given source to destination + """Copy source filepath to destination filepath Arguments: src (str): the source file which needs to be copied - dst (str): the destination of the sourc file + dst (str): the destination filepath + + Returns: + None + + """ + self._create_folder_for_file(dst) + self.log.debug("Copying file ... {} -> {}".format(src, dst)) + copyfile(src, dst) + + def hardlink_file(self, src, dst): + """Hardlink source filepath to destination filepath. + + Note: + Hardlink can only be produced between two files on the same + server/disk and editing one of the two will edit both files at + once. As such it is recommended to only make hardlinks between + static files to ensure publishes remain safe and non-edited. + + Arguments: + src (str): the source file which needs to be hardlinked + dst (str): the destination filepath + Returns: None """ - src = os.path.normpath(src) - dst = os.path.normpath(dst) - self.log.debug("Copying file ... {} -> {}".format(src, dst)) - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - six.reraise(*sys.exc_info()) - - # copy file with speedcopy and check if size of files are simetrical - while True: - if not shutil._samefile(src, dst): - copyfile(src, dst) - else: - self.log.critical( - "files are the same {} to {}".format(src, dst) - ) - os.remove(dst) - try: - shutil.copyfile(src, dst) - self.log.debug("Copying files with shutil...") - except OSError as e: - self.log.critical("Cannot copy {} to {}".format(src, dst)) - self.log.critical(e) - six.reraise(*sys.exc_info()) - if str(getsize(src)) in str(getsize(dst)): - break - - def hardlink_file(self, src, dst): - dirname = os.path.dirname(dst) - - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - six.reraise(*sys.exc_info()) - + self._create_folder_for_file(dst) + self.log.debug("Hardlinking file ... {} -> {}".format(src, dst)) create_hard_link(src, dst) - def get_subset(self, asset, instance): + def _get_instance_families(self, instance): + """Get all families of the instance""" + # todo: move this to lib? + family = instance.data.get("family") + families = [] + if family: + families.append(family) + + for _family in (instance.data.get("families") or []): + if _family not in families: + families.append(_family) + + return families + + def register_subset(self, instance): + # todo: rely less on self.prepare_anatomy to create this value + asset = instance.data.get("assetEntity") # <- from prepare_anatomy :( subset_name = instance.data["subset"] subset = io.find_one({ "type": "subset", @@ -748,18 +737,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if subset is None: self.log.info("Subset '%s' not found, creating ..." % subset_name) - self.log.debug("families. 
%s" % instance.data.get('families')) - self.log.debug( - "families. %s" % type(instance.data.get('families'))) - - family = instance.data.get("family") - families = [] - if family: - families.append(family) - - for _family in (instance.data.get("families") or []): - if _family not in families: - families.append(_family) + families = self._get_instance_families(instance) _id = io.insert_one({ "schema": "openpype:subset-3.0", @@ -773,8 +751,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = io.find_one({"_id": _id}) - # QUESTION Why is changing of group and updating it's - # families in 'get_subset'? + # Update subset group self._set_subset_group(instance, subset["_id"]) # Update families on subset. @@ -838,7 +815,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.subset_grouping_profiles, filtering_criteria ) - # Skip if there is not matchin profile + # Skip if there is not matching profile if not matching_profile: return None @@ -867,41 +844,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return filled_template - def create_version(self, subset, version_number, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - - Returns: - dict: collection of data to create a version - """ - - return {"schema": "openpype:version-3.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "data": data} - - def create_version_data(self, context, instance): + def create_version_data(self, instance): """Create the data collection for the version Args: - context: the current context instance: the current instance being published Returns: dict: the required information with instance.data as key """ - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families + context = instance.context # create relative source path for DB if "source" in instance.data: @@ -910,10 +863,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): source = context.data["currentFile"] anatomy = instance.context.data["anatomy"] source = self.get_rootless_path(anatomy, source) - self.log.debug("Source: {}".format(source)) + version_data = { - "families": families, + "families": self._get_instance_families(instance), "time": context.data["time"], "author": context.data["user"], "source": source, @@ -924,7 +877,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) } - intent_value = instance.context.data.get("intent") + intent_value = context.data.get("intent") if intent_value and isinstance(intent_value, dict): intent_value = intent_value.get("value") @@ -944,10 +897,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def main_family_from_instance(self, instance): """Returns main family of entered instance.""" - family = instance.data.get("family") - if not family: - family = instance.data["families"][0] - return family + return self._get_instance_families(instance)[0] def get_rootless_path(self, anatomy, path): """ Returns, if possible, path without absolute portion from host @@ -976,7 +926,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ).format(path)) return path - def get_files_info(self, instance, integrated_file_sizes): + def get_files_info(self, instance): """ Prepare 'files' portion for attached resources and main asset. 
Combining records from 'transfers' and 'hardlinks' parts from instance. @@ -991,27 +941,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): output_resources: array of dictionaries to be added to 'files' key in representation """ + # todo: refactor to use transfers/hardlinks of representations + # currently broken logic resources = list(instance.data.get("transfers", [])) resources.extend(list(instance.data.get("hardlinks", []))) + self.log.debug("get_files_info.resources:{}".format(resources)) - self.log.debug("get_resource_files_info.resources:{}". - format(resources)) + sites = self.compute_resource_sync_sites(instance) output_resources = [] anatomy = instance.context.data["anatomy"] for _src, dest in resources: - path = self.get_rootless_path(anatomy, dest) - dest = self.get_dest_temp_url(dest) - file_hash = openpype.api.source_hash(dest) - if self.TMP_FILE_EXT and \ - ',{}'.format(self.TMP_FILE_EXT) in file_hash: - file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), - '') - - file_info = self.prepare_file_info(path, - integrated_file_sizes[dest], - file_hash, - instance=instance) + file_info = self.prepare_file_info(dest, anatomy, sites=sites) output_resources.append(file_info) return output_resources @@ -1031,8 +972,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dest += '.{}'.format(self.TMP_FILE_EXT) return dest - def prepare_file_info(self, path, size=None, file_hash=None, - sites=None, instance=None): + def get_dest_final_url(self, temp_file_url): + """Temporary destination file url to final destination file url""" + return re.sub(r'\.{}$'.format(self.TMP_FILE_EXT), '', temp_file_url) + + def prepare_file_info(self, path, anatomy, sites): """ Prepare information for one file (asset or resource) Arguments: @@ -1042,74 +986,78 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): sites(optional): array of published locations, [ {'name':'studio', 'created_dt':date} by default keys expected ['studio', 'site1', 'gdrive1'] - instance(dict, optional): to get collected settings Returns: rec: dictionary with filled info """ + file_hash = openpype.api.source_hash(path) + + # todo: Avoid this logic + # Strip the temporary file extension from the file hash + if self.TMP_FILE_EXT and ',{}'.format(self.TMP_FILE_EXT) in file_hash: + file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), '') + + return { + "_id": io.ObjectId(), + "path": self.get_rootless_path(anatomy, path), + "size": os.path.getsize(path), + "hash": file_hash, + "sites": sites + } + + def compute_resource_sync_sites(self, instance): + """Get available resource sync sites""" + # Sync server logic + # TODO: Clean up sync settings local_site = 'studio' # default remote_site = None - always_accesible = [] + always_accessible = [] sync_project_presets = None - rec = { - "_id": io.ObjectId(), - "path": path - } - if size: - rec["size"] = size + system_sync_server_presets = ( + instance.context.data["system_settings"] + ["modules"] + ["sync_server"]) + log.debug("system_sett:: {}".format(system_sync_server_presets)) - if file_hash: - rec["hash"] = file_hash - - if sites: - rec["sites"] = sites - else: - system_sync_server_presets = ( - instance.context.data["system_settings"] - ["modules"] + if system_sync_server_presets["enabled"]: + sync_project_presets = ( + instance.context.data["project_settings"] + ["global"] ["sync_server"]) - log.debug("system_sett:: {}".format(system_sync_server_presets)) - if system_sync_server_presets["enabled"]: - sync_project_presets = ( - 
instance.context.data["project_settings"] - ["global"] - ["sync_server"]) + if sync_project_presets and sync_project_presets["enabled"]: + local_site, remote_site = self._get_sites(sync_project_presets) + always_accessible = sync_project_presets["config"]. \ + get("always_accessible_on", []) - if sync_project_presets and sync_project_presets["enabled"]: - local_site, remote_site = self._get_sites(sync_project_presets) + already_attached_sites = {} + meta = {"name": local_site, "created_dt": datetime.now()} + sites = [meta] + already_attached_sites[meta["name"]] = meta["created_dt"] - always_accesible = sync_project_presets["config"]. \ - get("always_accessible_on", []) + if sync_project_presets and sync_project_presets["enabled"]: + if remote_site and \ + remote_site not in already_attached_sites.keys(): + # add remote + meta = {"name": remote_site.strip()} + sites.append(meta) + already_attached_sites[meta["name"]] = None - already_attached_sites = {} - meta = {"name": local_site, "created_dt": datetime.now()} - rec["sites"] = [meta] - already_attached_sites[meta["name"]] = meta["created_dt"] - - if sync_project_presets and sync_project_presets["enabled"]: - if remote_site and \ - remote_site not in already_attached_sites.keys(): - # add remote - meta = {"name": remote_site.strip()} - rec["sites"].append(meta) + # add skeleton for site where it should be always synced to + for always_on_site in always_accessible: + if always_on_site not in already_attached_sites.keys(): + meta = {"name": always_on_site.strip()} + sites.append(meta) already_attached_sites[meta["name"]] = None - # add skeleton for site where it should be always synced to - for always_on_site in always_accesible: - if always_on_site not in already_attached_sites.keys(): - meta = {"name": always_on_site.strip()} - rec["sites"].append(meta) - already_attached_sites[meta["name"]] = None + # add alternative sites + alt = self._add_alternative_sites(system_sync_server_presets, + already_attached_sites) + sites.extend(alt) - # add alternative sites - rec = self._add_alternative_sites(system_sync_server_presets, - already_attached_sites, - rec) + log.debug("final sites:: {}".format(sites)) - log.debug("final sites:: {}".format(rec["sites"])) - - return rec + return sites def _get_sites(self, sync_project_presets): """Returns tuple (local_site, remote_site)""" @@ -1129,14 +1077,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def _add_alternative_sites(self, system_sync_server_presets, - already_attached_sites, - rec): + already_attached_sites): """Loop through all configured sites and add alternatives. 
See SyncServerModule.handle_alternate_site """ conf_sites = system_sync_server_presets.get("sites", {}) + alternative_sites = [] for site_name, site_info in conf_sites.items(): alt_sites = set(site_info.get("alternative_sites", [])) already_attached_keys = list(already_attached_sites.keys()) @@ -1149,12 +1097,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # alt site inherits state of 'created_dt' if real_created: meta["created_dt"] = real_created - rec["sites"].append(meta) + alternative_sites.append(meta) already_attached_sites[meta["name"]] = real_created - return rec + return alternative_sites - def handle_destination_files(self, integrated_file_sizes, mode): + def handle_destination_files(self, destinations, mode): """ Clean destination files Called when error happened during integrating to DB or to disk OR called to rename uploaded files from temporary name to final to @@ -1162,46 +1110,38 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Used to clean unwanted files Arguments: - integrated_file_sizes: dictionary, file urls as keys, size as value + destinations (list): file paths mode: 'remove' - clean files, 'finalize' - rename files, remove TMP_FILE_EXT suffix denoting temp file """ - if integrated_file_sizes: - for file_url, _file_size in integrated_file_sizes.items(): - if not os.path.exists(file_url): + if not destinations: + return + + for file_url in destinations: + if not os.path.exists(file_url): + self.log.debug( + "File {} was not found.".format(file_url) + ) + continue + + try: + if mode == 'remove': + self.log.debug("Removing file {}".format(file_url)) + os.remove(file_url) + if mode == 'finalize': + + new_name = self.get_dest_final_url(file_url) + if os.path.exists(new_name): + self.log.debug("Removing existing " + "file: {}".format(new_name)) + os.remove(new_name) + self.log.debug( - "File {} was not found.".format(file_url) + "Renaming file {} to {}".format(file_url, new_name) ) - continue - - try: - if mode == 'remove': - self.log.debug("Removing file {}".format(file_url)) - os.remove(file_url) - if mode == 'finalize': - new_name = re.sub( - r'\.{}$'.format(self.TMP_FILE_EXT), - '', - file_url - ) - - if os.path.exists(new_name): - self.log.debug( - "Overwriting file {} to {}".format( - file_url, new_name - ) - ) - shutil.copy(file_url, new_name) - os.remove(file_url) - else: - self.log.debug( - "Renaming file {} to {}".format( - file_url, new_name - ) - ) - os.rename(file_url, new_name) - except OSError: - self.log.error("Cannot {} file {}".format(mode, file_url), - exc_info=True) - six.reraise(*sys.exc_info()) + os.rename(file_url, new_name) + except OSError: + self.log.error("Cannot {} file {}".format(mode, file_url), + exc_info=True) + six.reraise(*sys.exc_info()) From ae1a9ff4cf996445bd74dcd7641639ed8342592e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 17 Mar 2022 11:49:12 +0100 Subject: [PATCH 025/785] More refactoring + draft (untested) implementation for separating File Transaction logic --- openpype/plugins/publish/integrate_new.py | 421 +++++++++++----------- 1 file changed, 215 insertions(+), 206 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e4986e3b3f..500456eaed 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -1,12 +1,10 @@ import os -from os.path import getsize import logging import sys import copy import clique import errno import six -import re from pymongo import DeleteOne, InsertOne, UpdateOne 
import pyblish.api @@ -14,7 +12,6 @@ from avalon import io from avalon.api import format_template_with_optional_keys import openpype.api from datetime import datetime -# from pype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, @@ -41,6 +38,160 @@ def get_first_frame_padded(collection): return get_frame_padded(start_frame, padding=collection.padding) +class FileTransaction(object): + """ + + The file transaction is a three step process. + + 1) Rename any existing files to a "temporary backup" during `process()` + 2) Copy the files to final destination during `process()` + 3) Remove any backed up files (*no rollback possible!) during `finalize()` + + Step 3 is done during `finalize()`. If not called the .bak files will + remain on disk. + + These steps try to ensure that we don't overwrite half of any existing + files e.g. if they are currently in use. + + Note: + A regular filesystem is *not* a transactional file system and even + though this implementation tries to produce a 'safe copy' with a + potential rollback do keep in mind that it's inherently unsafe due + to how filesystem works and a myriad of things could happen during + the transaction that break the logic. A file storage could go down, + permissions could be changed, other machines could be moving or writing + files. A lot can happen. + + Warning: + Any folders created during the transfer will not be removed. + + """ + + MODE_COPY = 0 + MODE_HARDLINK = 1 + + def __init__(self, log=None): + + if log is None: + log = logging.getLogger("FileTransaction") + + self.log = log + + # The transfer queue + # todo: make this an actual FIFO queue? + self._transfers = {} + + # Destination file paths that a file was transferred to + self._transferred = [] + + # Backup file location mapping to original locations + self._backup_to_original = {} + + def add(self, src, dst, mode=MODE_COPY): + """Add a new file to transfer queue""" + opts = {"mode": mode} + + src = os.path.normpath(src) + dst = os.path.normpath(dst) + + if dst in self._transfers: + queued_src = self._transfers[dst][0] + if src == queued_src: + self.log.debug("File transfer was already " + "in queue: {} -> {}".format(src, dst)) + return + else: + self.log.warning("File transfer in queue overwritten") + + self._transfers[dst] = (src, opts) + + def process(self): + + # Backup any existing files + for dst in self._transfers.keys(): + if os.path.exists(dst): + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug("Backup existing file: " + "{} -> {}".format(dst, backup)) + os.rename(dst, backup) + + # Copy the files to transfer + for dst, (src, opts) in self._transfers.items(): + self._create_folder_for_file(dst) + + if opts["mode"] == self.MODE_COPY: + self.log.debug("Copying file ... {} -> {}".format(src, dst)) + copyfile(src, dst) + elif opts["mode"] == self.MODE_HARDLINK: + self.log.debug("Hardlinking file ... 
{} -> {}".format(src, dst)) + create_hard_link(src, dst) + + self._transferred.append(dst) + + def finalize(self): + # Delete any backed up files + for backup in self._backup_to_original.keys(): + try: + os.remove(backup) + except OSError: + self.log.error("Failed to remove backup file: " + "{}".format(backup), + exc_info=True) + + def rollback(self): + + errors = 0 + + # Rollback any transferred files + for path in self._transferred: + try: + os.remove(path) + except OSError: + errors += 1 + self.log.error("Failed to rollback created file: " + "{}".format(path), + exc_info=True) + + # Rollback the backups + for backup, original in self._backup_to_original.items(): + try: + os.rename(backup, original) + except OSError: + errors +=1 + self.log.error("Failed to restore original file: " + "{} -> {}".format(backup, original), + exc_info=True) + + if errors: + self.log.error("{} errors occurred during " + "rollback.".format(errors), exc_info=True) + six.reraise(*sys.exc_info()) + + @property + def transferred(self): + """Return the processed transfers destination paths""" + return list(self._transferred) + + @property + def backups(self): + """Return the backup file paths""" + return list(self._backup_to_original.keys()) + + def _create_folder_for_file(self, path): + dirname = os.path.dirname(path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) + + class IntegrateAssetNew(pyblish.api.InstancePlugin): """Resolve any dependency issues @@ -122,18 +273,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ] default_template_name = "publish" - # suffix to denote temporary files, use without '.' - TMP_FILE_EXT = 'tmp' - - # file_url : file_size of all published and uploaded files - destinations = list() - # Attributes set by settings template_name_profiles = None subset_grouping_profiles = None def process(self, instance): - self.destinations = [] # Exclude instances that also contain families from exclude families families = set( @@ -143,17 +287,20 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if families & set(self.exclude_families): return + file_transactions = FileTransaction(log=self.log) try: - self.register(instance) - self.log.info("Integrated Asset in to the database ...") - self.handle_destination_files(self.destinations, - 'finalize') + self.register(instance, file_transactions) except Exception: # clean destination + # todo: rollback any registered entities? (or how safe are we?) + file_transactions.rollback() self.log.critical("Error when registering", exc_info=True) - self.handle_destination_files(self.destinations, 'remove') six.reraise(*sys.exc_info()) + # Finalizing can't be rollbacked safely so no use for moving it to + # the try, except. 
+ file_transactions.finalize() + def prepare_anatomy(self, instance): """Prepare anatomy data used to define representation destinations""" @@ -244,7 +391,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return template_name, anatomy_data - def register(self, instance): + def register(self, instance, file_transactions): instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: @@ -272,9 +419,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version = self.register_version(instance, subset) instance.data["versionEntity"] = version - instance.data['version'] = version['name'] - existing_repres = list(io.find({ + archived_repres = list(io.find({ "parent": version["_id"], "type": "archived_representation" })) @@ -294,19 +440,47 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): prepared = self.prepare_representation(repre, anatomy_data, template_name, - existing_repres, + archived_repres, version, instance_stagingdir, instance) + representation = prepared["representation"] + + # todo: register the file transfers correctly + for src, dst in representation["transfers"]: + file_transactions.add(src, dst, + mode=file_transactions.MODE_COPY) + for src, dst in representation["hardlinks"]: + file_transactions.add(src, dst, + mode=file_transactions.MODE_HARDLINK) # todo: simplify this? - representation = prepared["representation"] representations.append(representation) published_representations[representation["_id"]] = prepared + # could throw exception, will be caught in 'process' + # all integration to DB is being done together lower, + # so no rollback needed + self.log.debug("Integrating source files to destination ...") + file_transactions.process() + self.log.debug("Backup files " + "{}".format(file_transactions.backups)) + self.log.debug("Integrated files " + "{}".format(file_transactions.transferred)) + + # todo: fix get file info for transferred files per representation + # currently it'd set all files for all representations + # get 'files' info for representation and all attached resources + integrated_files = file_transactions.transferred + self.log.debug("Preparing files information ...") + representation["files"] = self.get_files_info( + instance, + integrated_files + ) + # Remove old representations if there are any (before insertion of new) - if existing_repres: - repre_ids_to_remove = [repre["_id"] for repre in existing_repres] + if archived_repres: + repre_ids_to_remove = [repre["_id"] for repre in archived_repres] io.delete_many({"_id": {"$in": repre_ids_to_remove}}) # Write the new representations to the database @@ -395,7 +569,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def prepare_representation(self, repre, anatomy_data, template_name, - existing_repres, + archived_repres, version, instance_stagingdir, instance): @@ -439,11 +613,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("Representation uses instance staging dir: " "{}".format(instance_stagingdir)) stagingdir = instance_stagingdir + if not stagingdir: + raise ValueError("No staging directory set for representation: " + "{}".format(repre)) self.log.debug("Anatomy template name: {}".format(template_name)) anatomy = instance.context.data['anatomy'] - template = os.path.normpath( - anatomy.templates[template_name]["path"]) + template = os.path.normpath(anatomy.templates[template_name]["path"]) is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: @@ -566,24 +742,21 @@ class 
IntegrateAssetNew(pyblish.api.InstancePlugin): continue repre_context[key] = template_data[key] - # Use previous representation's id if there are any - repre_id = None - repre_name_lower = repre["name"].lower() - for _existing_repre in existing_repres: - # NOTE should we check lowered names? - if repre_name_lower == _existing_repre["name"].lower(): - repre_id = _existing_repre["orig_id"] - break + # Define representation id + repre_id = io.ObjectId() - # Create new id if existing representations does not match - if repre_id is None: - repre_id = io.ObjectId() + # Use previous representation's id if there is a name match + repre_name_lower = repre["name"].lower() + for _archived_repres in archived_repres: + if repre_name_lower == _archived_repres["name"].lower(): + repre_id = _archived_repres["orig_id"] + break # todo: `repre` is not the actual `representation` entity # we should simplify/clarify difference between data above # and the actual representation entity for the database data = repre.get("data") or {} - data.update({'path': dst, 'template': template}) + data.update({'path': repre["published_path"], 'template': template}) representation = { "_id": repre_id, "schema": "openpype:representation-2.0", @@ -597,34 +770,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "context": repre_context } + # todo: simplify/streamline which additional data makes its way into + # the representation context if repre.get("outputName"): representation["context"]["output"] = repre['outputName'] if is_sequence_representation and repre.get("frameStart") is not None: representation['context']['frame'] = template_data["frame"] - # any file that should be physically copied is expected in - # 'transfers' or 'hardlinks' - integrated_files = [] - if instance.data.get('transfers', False) or \ - instance.data.get('hardlinks', False): - # could throw exception, will be caught in 'process' - # all integration to DB is being done together lower, - # so no rollback needed - # todo: separate the actual integrating of the files onto its own - # taking just a list of transfers as inputs (potentially - # with copy mode flag, like hardlink/copy, etc.) - self.log.debug("Integrating source files to destination ...") - integrated_files = self.integrate(instance) - self.log.debug("Integrated files {}".format(integrated_files)) - - # get 'files' info for representation and all attached resources - self.log.debug("Preparing files information ...") - representation["files"] = self.get_files_info( - instance, - integrated_files - ) - return { "representation": representation, "anatomy_data": template_data, @@ -633,84 +786,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "published_files": [transfer[1] for transfer in repre["transfers"]] } - def integrate(self, instance): - """ Move the files. 
- - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - Returns: - list: destination full paths of integrated files - """ - # store destinations for potential rollback and measuring sizes - destinations = [] - transfers = list(instance.data.get("transfers", list())) - for src, dest in transfers: - src = os.path.normpath(src) - dest = os.path.normpath(dest) - if src != dest: - dest = self.get_dest_temp_url(dest) - self.copy_file(src, dest) - destinations.append(dest) - - # Produce hardlinked copies - hardlinks = instance.data.get("hardlinks", list()) - for src, dest in hardlinks: - dest = self.get_dest_temp_url(dest) - if not os.path.exists(dest): - self.hardlink_file(src, dest) - - destinations.append(dest) - - return destinations - - def _create_folder_for_file(self, path): - dirname = os.path.dirname(path) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - six.reraise(*sys.exc_info()) - - def copy_file(self, src, dst): - """Copy source filepath to destination filepath - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination filepath - - Returns: - None - - """ - self._create_folder_for_file(dst) - self.log.debug("Copying file ... {} -> {}".format(src, dst)) - copyfile(src, dst) - - def hardlink_file(self, src, dst): - """Hardlink source filepath to destination filepath. - - Note: - Hardlink can only be produced between two files on the same - server/disk and editing one of the two will edit both files at - once. As such it is recommended to only make hardlinks between - static files to ensure publishes remain safe and non-edited. - - Arguments: - src (str): the source file which needs to be hardlinked - dst (str): the destination filepath - - Returns: - None - """ - self._create_folder_for_file(dst) - self.log.debug("Hardlinking file ... {} -> {}".format(src, dst)) - create_hard_link(src, dst) - def _get_instance_families(self, instance): """Get all families of the instance""" # todo: move this to lib? @@ -727,7 +802,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register_subset(self, instance): # todo: rely less on self.prepare_anatomy to create this value - asset = instance.data.get("assetEntity") # <- from prepare_anatomy :( + asset = instance.data.get("assetEntity") # stored by prepare_anatomy subset_name = instance.data["subset"] subset = io.find_one({ "type": "subset", @@ -957,25 +1032,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return output_resources - def get_dest_temp_url(self, dest): - """ Enhance destination path with TMP_FILE_EXT to denote temporary - file. 
- Temporary files will be renamed after successful registration - into DB and full copy to destination - - Arguments: - dest: destination url of published file (absolute) - Returns: - dest: destination path + '.TMP_FILE_EXT' - """ - if self.TMP_FILE_EXT and '.{}'.format(self.TMP_FILE_EXT) not in dest: - dest += '.{}'.format(self.TMP_FILE_EXT) - return dest - - def get_dest_final_url(self, temp_file_url): - """Temporary destination file url to final destination file url""" - return re.sub(r'\.{}$'.format(self.TMP_FILE_EXT), '', temp_file_url) - def prepare_file_info(self, path, anatomy, sites): """ Prepare information for one file (asset or resource) @@ -991,11 +1047,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): """ file_hash = openpype.api.source_hash(path) - # todo: Avoid this logic - # Strip the temporary file extension from the file hash - if self.TMP_FILE_EXT and ',{}'.format(self.TMP_FILE_EXT) in file_hash: - file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), '') - return { "_id": io.ObjectId(), "path": self.get_rootless_path(anatomy, path), @@ -1004,6 +1055,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "sites": sites } + # region sync sites def compute_resource_sync_sites(self, instance): """Get available resource sync sites""" # Sync server logic @@ -1101,47 +1153,4 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): already_attached_sites[meta["name"]] = real_created return alternative_sites - - def handle_destination_files(self, destinations, mode): - """ Clean destination files - Called when error happened during integrating to DB or to disk - OR called to rename uploaded files from temporary name to final to - highlight publishing in progress/broken - Used to clean unwanted files - - Arguments: - destinations (list): file paths - mode: 'remove' - clean files, - 'finalize' - rename files, - remove TMP_FILE_EXT suffix denoting temp file - """ - if not destinations: - return - - for file_url in destinations: - if not os.path.exists(file_url): - self.log.debug( - "File {} was not found.".format(file_url) - ) - continue - - try: - if mode == 'remove': - self.log.debug("Removing file {}".format(file_url)) - os.remove(file_url) - if mode == 'finalize': - - new_name = self.get_dest_final_url(file_url) - if os.path.exists(new_name): - self.log.debug("Removing existing " - "file: {}".format(new_name)) - os.remove(new_name) - - self.log.debug( - "Renaming file {} to {}".format(file_url, new_name) - ) - os.rename(file_url, new_name) - except OSError: - self.log.error("Cannot {} file {}".format(mode, file_url), - exc_info=True) - six.reraise(*sys.exc_info()) + # endregion From 9f6cc5df3a11031fb18155c97e0a73bb6f3f6108 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 17 Mar 2022 11:51:06 +0100 Subject: [PATCH 026/785] Fix hound --- openpype/plugins/publish/integrate_new.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 500456eaed..e74b528ae7 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -126,7 +126,8 @@ class FileTransaction(object): self.log.debug("Copying file ... {} -> {}".format(src, dst)) copyfile(src, dst) elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... {} -> {}".format(src, dst)) + self.log.debug("Hardlinking file ... 
{} -> {}".format(src, + dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -160,7 +161,7 @@ class FileTransaction(object): try: os.rename(backup, original) except OSError: - errors +=1 + errors += 1 self.log.error("Failed to restore original file: " "{} -> {}".format(backup, original), exc_info=True) From 56bcd8cec35f201ead80a18c09f6c070b76209c1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 17 Mar 2022 16:30:49 +0100 Subject: [PATCH 027/785] Continue refactor, restore functionality - now can correctly publish as before (rudimentary tested only) --- openpype/plugins/publish/integrate_new.py | 136 +++++++++++----------- 1 file changed, 70 insertions(+), 66 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e74b528ae7..c550c1011c 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -101,7 +101,10 @@ class FileTransaction(object): "in queue: {} -> {}".format(src, dst)) return else: - self.log.warning("File transfer in queue overwritten") + self.log.warning("File transfer in queue replaced..") + self.log.debug("Removed from queue: " + "{} -> {}".format(queued_src, dst)) + self.log.debug("Added to queue: {} -> {}".format(src, dst)) self._transfers[dst] = (src, opts) @@ -298,7 +301,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.critical("Error when registering", exc_info=True) six.reraise(*sys.exc_info()) - # Finalizing can't be rollbacked safely so no use for moving it to + # Finalizing can't rollback safely so no use for moving it to # the try, except. file_transactions.finalize() @@ -426,11 +429,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "type": "archived_representation" })) - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) + # Prepare all representations template_name, anatomy_data = self.prepare_anatomy(instance) - published_representations = {} - representations = [] + prepared_representations = [] for repre in instance.data["representations"]: if "delete" in repre.get("tags", []): @@ -438,6 +439,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "{}".format(repre)) continue + # todo: reduce/simplify what is returned from this function prepared = self.prepare_representation(repre, anatomy_data, template_name, @@ -445,23 +447,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version, instance_stagingdir, instance) - representation = prepared["representation"] - # todo: register the file transfers correctly - for src, dst in representation["transfers"]: - file_transactions.add(src, dst, - mode=file_transactions.MODE_COPY) - for src, dst in representation["hardlinks"]: - file_transactions.add(src, dst, - mode=file_transactions.MODE_HARDLINK) + for src, dst in prepared["transfers"]: + # todo: add support for hardlink transfers + file_transactions.add(src, dst) - # todo: simplify this? - representations.append(representation) - published_representations[representation["_id"]] = prepared + prepared_representations.append(prepared) - # could throw exception, will be caught in 'process' - # all integration to DB is being done together lower, - # so no rollback needed + # Each instance can also have pre-defined transfers not explicitly + # part of a representation - like texture resources used by a + # .ma representation. Those destination paths are pre-defined, etc. + # todo: should we move or simplify this logic? 
+ for src, dst in instance.data.get("transfers", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) + for src, dst in instance.data.get("hardlinks", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) + + # Process all file transfers of all integrations now self.log.debug("Integrating source files to destination ...") file_transactions.process() self.log.debug("Backup files " @@ -469,17 +471,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("Integrated files " "{}".format(file_transactions.transferred)) - # todo: fix get file info for transferred files per representation - # currently it'd set all files for all representations - # get 'files' info for representation and all attached resources - integrated_files = file_transactions.transferred - self.log.debug("Preparing files information ...") - representation["files"] = self.get_files_info( - instance, - integrated_files - ) + # Finalize the representations now the published files are integrated + # Get 'files' info for representations and its attached resources + self.log.debug("Retrieving Representation files information ...") + sites = self.compute_resource_sync_sites(instance) + anatomy = instance.context.data["anatomy"] + representations = [] + for prepared in prepared_representations: + transfers = prepared["transfers"] + representation = prepared["representation"] + representation["files"] = self.get_files_info( + transfers, sites, anatomy + ) + representations.append(representation) - # Remove old representations if there are any (before insertion of new) + # Remove all archived representations if archived_repres: repre_ids_to_remove = [repre["_id"] for repre in archived_repres] io.delete_many({"_id": {"$in": repre_ids_to_remove}}) @@ -487,7 +493,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Write the new representations to the database io.insert_many(representations) - instance.data["published_representations"] = published_representations + # Backwards compatibility + # todo: can we avoid the need to store this? + instance.data["published_representations"] = { + p["representation"]["_id"]: p for p in prepared_representations + } self.log.info("Registered {} representations" "".format(len(representations))) @@ -495,7 +505,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register_version(self, instance, subset): version_number = instance.data["version"] - self.log.debug("Next version: v{}".format(version_number)) + self.log.debug("Version: v{0:03d}".format(version_number)) version_data = self.create_version_data(instance) version_data_instance = instance.data.get('versionData') @@ -565,6 +575,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) version = io.find_one({"_id": version_id}) + + self.log.info("Registered version: v{0:03d}".format(version["name"])) + return version def prepare_representation(self, repre, @@ -585,7 +598,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if repre.get("transfers"): raise ValueError("Representation is not allowed to have transfers" - "data before integration. " + "data before integration. 
They are computed in " + "the integrator" "Got: {}".format(repre["transfers"])) # required representation keys @@ -698,18 +712,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_collection.padding = destination_padding assert len(src_collection) == len(dst_collection), "This is a bug" + # Multiple file transfers transfers = [] for src_file_name, dst in zip(src_collection, dst_collection): src = os.path.join(stagingdir, src_file_name) - self.log.debug("source: {}".format(src)) - self.log.debug("destination: `{}`".format(dst)) - transfers.append(src, dst) - - # Store first frame as published path - # todo: remove `published_path` since it can be retrieved from - # `transfers` by taking the first destination transfers[0][1] - repre['published_path'] = next(iter(dst_collection)) - repre["transfers"].extend(transfers) + transfers.append((src, dst)) else: # Single file @@ -728,11 +735,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst = os.path.normpath(template_filled) # Single file transfer - self.log.debug("source: {}".format(src)) - self.log.debug("destination: `{}`".format(dst)) - repre["transfers"] = [src, dst] - - repre['published_path'] = dst + transfers = [(src, dst)] if repre.get("udim"): repre_context["udim"] = repre.get("udim") # store list @@ -753,11 +756,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_id = _archived_repres["orig_id"] break + # Backwards compatibility: + # Store first transferred destination as published path data + # todo: can we remove this? + published_path = transfers[0][1] + # todo: `repre` is not the actual `representation` entity # we should simplify/clarify difference between data above # and the actual representation entity for the database data = repre.get("data") or {} - data.update({'path': repre["published_path"], 'template': template}) + data.update({'path': published_path, 'template': template}) representation = { "_id": repre_id, "schema": "openpype:representation-2.0", @@ -782,9 +790,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return { "representation": representation, "anatomy_data": template_data, - # todo: avoid the need for 'published_files'? + "transfers": transfers, + # todo: avoid the need for 'published_files' used by Integrate Hero # backwards compatibility - "published_files": [transfer[1] for transfer in repre["transfers"]] + "published_files": [transfer[1] for transfer in transfers] } def _get_instance_families(self, instance): @@ -805,6 +814,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # todo: rely less on self.prepare_anatomy to create this value asset = instance.data.get("assetEntity") # stored by prepare_anatomy subset_name = instance.data["subset"] + self.log.debug("Subset: {}".format(subset_name)) + subset = io.find_one({ "type": "subset", "parent": asset["_id"], @@ -838,6 +849,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): {"$set": {"data.families": families}} ) + self.log.info("Registered subset: {}".format(subset_name)) + return subset def _set_subset_group(self, instance, subset_id): @@ -871,9 +884,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if not self.subset_grouping_profiles: return None + # TODO: Resolve below questions # QUESTION - # - is there a chance that task name is not filled in anatomy - # data? + # - is there a chance that task name is not filled in anatomy data? # - should we use context task in that case? 
anatomy_data = instance.data["anatomyData"] task_name = None @@ -1002,7 +1015,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ).format(path)) return path - def get_files_info(self, instance): + def get_files_info(self, transfers, sites, anatomy): """ Prepare 'files' portion for attached resources and main asset. Combining records from 'transfers' and 'hardlinks' parts from instance. @@ -1017,21 +1030,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): output_resources: array of dictionaries to be added to 'files' key in representation """ - # todo: refactor to use transfers/hardlinks of representations - # currently broken logic - resources = list(instance.data.get("transfers", [])) - resources.extend(list(instance.data.get("hardlinks", []))) - self.log.debug("get_files_info.resources:{}".format(resources)) - - sites = self.compute_resource_sync_sites(instance) - - output_resources = [] - anatomy = instance.context.data["anatomy"] - for _src, dest in resources: + file_infos = [] + for _src, dest in transfers: file_info = self.prepare_file_info(dest, anatomy, sites=sites) - output_resources.append(file_info) + file_infos.append(file_info) - return output_resources + return file_infos def prepare_file_info(self, path, anatomy, sites): """ Prepare information for one file (asset or resource) From 8996280224aa30ad800e955ff165bdbe48bb8296 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 23 Mar 2022 23:38:05 +0100 Subject: [PATCH 028/785] Reduce duplicated logic by implementing `resolve_profile` method --- openpype/plugins/publish/integrate_new.py | 107 ++++++++++------------ 1 file changed, 48 insertions(+), 59 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 2142920a09..e43afbf7f6 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -359,17 +359,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "short": task_code } - elif "task" in anatomy_data: - # Just set 'task_name' variable to context task - task_name = anatomy_data["task"]["name"] - task_type = anatomy_data["task"]["type"] - - else: - task_name = None - task_type = None - # Fill family in anatomy data - anatomy_data["family"] = instance.data.get("family") + anatomy_data["family"] = self.main_family_from_instance(instance) intent_value = instance.context.data.get("intent") if intent_value and isinstance(intent_value, dict): @@ -378,25 +369,44 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if intent_value: anatomy_data["intent"] = intent_value - # Get profile - key_values = { - "families": self.main_family_from_instance(instance), - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, - logger=self.log - ) - + profile, _ = self.resolve_profile(self.template_name_profiles, + instance) template_name = "publish" if profile: template_name = profile["template_name"] return template_name, anatomy_data + def resolve_profile(self, profiles, instance): + """Resolve profile by family, task name, host name and task type""" + + # Anatomy data is pre-filled by Collectors and `self.prepare_anatomy` + anatomy_data = instance.data["anatomyData"] + + # TODO: Resolve below questions + # QUESTION + # - is there a chance that task name is not filled in anatomy data? + # - should we use context task in that case? 
+ # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + filter_criteria = { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "hosts": anatomy_data["host"], + "task_types": task.get("type") + } + # Get profile + profile = filter_profiles( + profiles, + filter_criteria, + logger=self.log + ) + + # TODO: See if we can simplify to avoid needing to return filter + # criteria used in `self._get_subset_group` + return profile, filter_criteria + def register(self, instance, file_transactions): instance_stagingdir = instance.data.get("stagingDir") @@ -886,50 +896,29 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if not self.subset_grouping_profiles: return None - # TODO: Resolve below questions - # QUESTION - # - is there a chance that task name is not filled in anatomy data? - # - should we use context task in that case? - anatomy_data = instance.data["anatomyData"] - task_name = None - task_type = None - if "task" in anatomy_data: - task_name = anatomy_data["task"]["name"] - task_type = anatomy_data["task"]["type"] - filtering_criteria = { - "families": instance.data["family"], - "hosts": instance.context.data["hostName"], - "tasks": task_name, - "task_types": task_type - } - matching_profile = filter_profiles( - self.subset_grouping_profiles, - filtering_criteria - ) - # Skip if there is not matching profile - if not matching_profile: + # Skip if there is no matching profile + profile, criteria = self.resolve_profile(self.subset_grouping_profiles, + instance) + if not profile: return None - filled_template = None - template = matching_profile["template"] - fill_pairs = ( - ("family", filtering_criteria["families"]), - ("task", filtering_criteria["tasks"]), - ("host", filtering_criteria["hosts"]), - ("subset", instance.data["subset"]), - ("renderlayer", instance.data.get("renderlayer")) - ) - fill_pairs = prepare_template_data(fill_pairs) + template = profile["template"] + fill_pairs = prepare_template_data({ + "family": criteria["families"], + "task": criteria["tasks"], + "host": criteria["hosts"], + "subset": instance.data["subset"], + "renderlayer": instance.data.get("renderlayer") + }) + + filled_template = None try: filled_template = StringTemplate.format_strict_template( template, fill_pairs ) except (KeyError, TemplateUnsolved): - keys = [] - if fill_pairs: - keys = fill_pairs.keys() - + keys = fill_pairs.keys() msg = "Subset grouping failed. " \ "Only {} are expected in Settings".format(','.join(keys)) self.log.warning(msg) From 177e244bd80bf0b1d472948cba45f40dfecd672e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 23 Mar 2022 23:45:24 +0100 Subject: [PATCH 029/785] Remove prepare anatomy data logic that is already collected/generated in CollectAnatomyContextData and CollectAnatomyInstanceData. 
This currently was duplicated logic and should not be handled in the Integrator --- openpype/plugins/publish/integrate_new.py | 51 +---------------------- 1 file changed, 1 insertion(+), 50 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e43afbf7f6..a1a116bd43 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -310,58 +310,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def prepare_anatomy(self, instance): """Prepare anatomy data used to define representation destinations""" - context = instance.context - anatomy_data = instance.data["anatomyData"] - project_entity = instance.data["projectEntity"] - - context_asset_name = None - context_asset_doc = context.data.get("assetEntity") - if context_asset_doc: - context_asset_name = context_asset_doc["name"] - - asset_name = instance.data["asset"] - asset_entity = instance.data.get("assetEntity") - if not asset_entity or asset_entity["name"] != context_asset_name: - asset_entity = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project_entity["_id"] - }) - assert asset_entity, ( - "No asset found by the name \"{0}\" in project \"{1}\"" - ).format(asset_name, project_entity["name"]) - - instance.data["assetEntity"] = asset_entity - - # update anatomy data with asset specific keys - # - name should already been set - hierarchy = "" - parents = asset_entity["data"]["parents"] - if parents: - hierarchy = "/".join(parents) - anatomy_data["hierarchy"] = hierarchy - - # Make sure task name in anatomy data is same as on instance.data - asset_tasks = ( - asset_entity.get("data", {}).get("tasks") - ) or {} - task_name = instance.data.get("task") - if task_name: - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - project_task_types = project_entity["config"]["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") - anatomy_data["task"] = { - "name": task_name, - "type": task_type, - "short": task_code - } - - # Fill family in anatomy data - anatomy_data["family"] = self.main_family_from_instance(instance) + # TODO: This logic should move to CollectAnatomyContextData intent_value = instance.context.data.get("intent") if intent_value and isinstance(intent_value, dict): intent_value = intent_value.get("value") From 3fd2d020149e5b33c0be0ab7000376a0f30ed96f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 23 Mar 2022 23:55:40 +0100 Subject: [PATCH 030/785] Move logic to clarify what should be removed/moved and bring logic closer to where it's used --- openpype/plugins/publish/integrate_new.py | 36 ++++++++++------------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index a1a116bd43..e57fbaf294 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -293,6 +293,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if families & set(self.exclude_families): return + # TODO: Avoid the need to do any adjustments to anatomy data + # Best case scenario that's all handled by collectors + self.prepare_anatomy(instance) + file_transactions = FileTransaction(log=self.log) try: self.register(instance, file_transactions) @@ -309,24 +313,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def prepare_anatomy(self, instance): """Prepare anatomy data used to define representation destinations""" - - 
anatomy_data = instance.data["anatomyData"] - # TODO: This logic should move to CollectAnatomyContextData intent_value = instance.context.data.get("intent") if intent_value and isinstance(intent_value, dict): intent_value = intent_value.get("value") - - if intent_value: - anatomy_data["intent"] = intent_value - - profile, _ = self.resolve_profile(self.template_name_profiles, - instance) - template_name = "publish" - if profile: - template_name = profile["template_name"] - - return template_name, anatomy_data + if intent_value: + instance.data["anatomyData"]["intent"] = intent_value def resolve_profile(self, profiles, instance): """Resolve profile by family, task name, host name and task type""" @@ -382,6 +374,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) ) + # Define publish template name from profiles + profile, _ = self.resolve_profile(self.template_name_profiles, + instance) + template_name = "publish" + if profile: + template_name = profile["template_name"] + subset = self.register_subset(instance) version = self.register_version(instance, subset) @@ -393,7 +392,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): })) # Prepare all representations - template_name, anatomy_data = self.prepare_anatomy(instance) prepared_representations = [] for repre in instance.data["representations"]: @@ -404,7 +402,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # todo: reduce/simplify what is returned from this function prepared = self.prepare_representation(repre, - anatomy_data, template_name, archived_repres, version, @@ -544,16 +541,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return version def prepare_representation(self, repre, - anatomy_data, template_name, archived_repres, version, instance_stagingdir, instance): - # create template data for Anatomy - template_data = copy.deepcopy(anatomy_data) - # pre-flight validations if repre["ext"].startswith("."): raise ValueError("Extension must not start with a dot '.': " @@ -565,6 +558,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "the integrator" "Got: {}".format(repre["transfers"])) + # create template data for Anatomy + template_data = copy.deepcopy(instance.data["anatomyData"]) + # required representation keys files = repre['files'] template_data["representation"] = repre["name"] From 8edfb3f7d3f926539f7f060725b3b7e0b1d697e5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 00:10:59 +0100 Subject: [PATCH 031/785] Simplify profile filtering --- openpype/plugins/publish/integrate_new.py | 42 +++++++++-------------- 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e57fbaf294..bdc045d1db 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -320,35 +320,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if intent_value: instance.data["anatomyData"]["intent"] = intent_value - def resolve_profile(self, profiles, instance): - """Resolve profile by family, task name, host name and task type""" - - # Anatomy data is pre-filled by Collectors and `self.prepare_anatomy` + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + # Anatomy data is pre-filled by Collectors anatomy_data = instance.data["anatomyData"] - # TODO: Resolve below questions - # QUESTION - # - is there a chance that task name is not filled in anatomy data? - # - should we use context task in that case? 
# Task can be optional in anatomy data task = anatomy_data.get("task", {}) - filter_criteria = { + # Return filter criteria + return { "families": anatomy_data["family"], "tasks": task.get("name"), "hosts": anatomy_data["host"], "task_types": task.get("type") } - # Get profile - profile = filter_profiles( - profiles, - filter_criteria, - logger=self.log - ) - - # TODO: See if we can simplify to avoid needing to return filter - # criteria used in `self._get_subset_group` - return profile, filter_criteria def register(self, instance, file_transactions): @@ -375,8 +361,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) # Define publish template name from profiles - profile, _ = self.resolve_profile(self.template_name_profiles, - instance) + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(self.template_name_profiles, + filter_criteria, + logger=self.log) template_name = "publish" if profile: template_name = profile["template_name"] @@ -844,17 +832,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return None # Skip if there is no matching profile - profile, criteria = self.resolve_profile(self.subset_grouping_profiles, - instance) + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(self.subset_grouping_profiles, + filter_criteria, + logger=self.log) if not profile: return None template = profile["template"] fill_pairs = prepare_template_data({ - "family": criteria["families"], - "task": criteria["tasks"], - "host": criteria["hosts"], + "family": filter_criteria["families"], + "task": filter_criteria["tasks"], + "host": filter_criteria["hosts"], "subset": instance.data["subset"], "renderlayer": instance.data.get("renderlayer") }) From 79286ead4b91504afa30df711e8f751451f53552 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 00:16:32 +0100 Subject: [PATCH 032/785] Re-use get families logic --- openpype/plugins/publish/integrate_new.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index bdc045d1db..e66a71c483 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -286,10 +286,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def process(self, instance): # Exclude instances that also contain families from exclude families - families = set( - # Consider family and families data - [instance.data["family"]] + instance.data.get("families", []) - ) + families = set(self._get_instance_families(instance)) if families & set(self.exclude_families): return From d6c682723de6eb025b21768ced54b2756373fba6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 00:19:16 +0100 Subject: [PATCH 033/785] Remove todo since assetEntity already comes from Collectors + re-use families variable --- openpype/plugins/publish/integrate_new.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e66a71c483..856f8af163 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -755,8 +755,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return families def register_subset(self, instance): - # todo: rely less on self.prepare_anatomy to create this value - asset = instance.data.get("assetEntity") # stored by prepare_anatomy + asset = instance.data.get("assetEntity") 
subset_name = instance.data["subset"] self.log.debug("Subset: {}".format(subset_name)) @@ -766,9 +765,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "name": subset_name }) + families = self._get_instance_families(instance) if subset is None: self.log.info("Subset '%s' not found, creating ..." % subset_name) - families = self._get_instance_families(instance) _id = io.insert_one({ "schema": "openpype:subset-3.0", @@ -786,8 +785,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self._set_subset_group(instance, subset["_id"]) # Update families on subset. - families = [instance.data["family"]] - families.extend(instance.data.get("families", [])) io.update_many( {"type": "subset", "_id": ObjectId(subset["_id"])}, {"$set": {"data.families": families}} From 47259f8ef7b177892c76a8dbfde6d147cf664d39 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 00:21:44 +0100 Subject: [PATCH 034/785] Add todo to move get subset group logic --- openpype/plugins/publish/integrate_new.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 856f8af163..91d2f3a943 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -821,6 +821,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Attribute 'subset_grouping_profiles' is defined by OpenPype settings. """ + # TODO: This logic is better suited for a Collector to just store + # instance.data["subsetGroup"] # Skip if 'subset_grouping_profiles' is empty if not self.subset_grouping_profiles: return None From b128e0addffc77991e5ff25f2d219d8ed8613136 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 14:21:32 +0100 Subject: [PATCH 035/785] Override stored repre context `udim` for backwards compatibility --- openpype/plugins/publish/integrate_new.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 91d2f3a943..e3abb8f04f 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -275,7 +275,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["clip"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username", "frame", "udim" + "family", "hierarchy", "task", "username", "frame" ] default_template_name = "publish" @@ -681,15 +681,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Single file transfer transfers = [(src, dst)] - if repre.get("udim"): - repre_context["udim"] = repre.get("udim") # store list - for key in self.db_representation_context_keys: value = template_data.get(key) if not value: continue repre_context[key] = template_data[key] + # Explicitly store the full list even though template data might + # have a different value + if repre.get("udim"): + repre_context["udim"] = repre.get("udim") # store list + # Define representation id repre_id = ObjectId() From 9997acbbeae32f1473c39df6cf78a8bfa7257aff Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 14:22:49 +0100 Subject: [PATCH 036/785] Encapsulate version data completely into its own function --- openpype/plugins/publish/integrate_new.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 
e3abb8f04f..6e92f81b8b 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -452,17 +452,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_number = instance.data["version"] self.log.debug("Version: v{0:03d}".format(version_number)) - version_data = self.create_version_data(instance) - version_data_instance = instance.data.get('versionData') - if version_data_instance: - version_data.update(version_data_instance) - version = { "schema": "openpype:version-3.0", "type": "version", "parent": subset["_id"], "name": version_number, - "data": version_data + "data": self.create_version_data(instance) } repres = instance.data.get("representations", []) @@ -909,6 +904,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if key in instance.data: version_data[key] = instance.data[key] + # Include instance.data[versionData] directly + version_data_instance = instance.data.get('versionData') + if version_data_instance: + version_data.update(version_data_instance) + return version_data def main_family_from_instance(self, instance): From 5b1f6eb30c459011fa685dcf325f39c4af72838e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 14:23:27 +0100 Subject: [PATCH 037/785] Move logic closer to where it's used --- openpype/plugins/publish/integrate_new.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 6e92f81b8b..a787f8d50d 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -460,9 +460,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "data": self.create_version_data(instance) } - repres = instance.data.get("representations", []) - new_repre_names_low = [_repre["name"].lower() for _repre in repres] - existing_version = io.find_one({ 'type': 'version', 'parent': subset["_id"], @@ -488,6 +485,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): })) # Find representations of existing version and archive them + repres = instance.data.get("representations", []) + new_repre_names_low = [_repre["name"].lower() for _repre in repres] current_repres = io.find({ "type": "representation", "parent": version_id From 3369c15bdf837d6d8e83a8c054794e95fccd061b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 14:25:15 +0100 Subject: [PATCH 038/785] Preparation to delay Version document write to database closer to representation write --- openpype/plugins/publish/integrate_new.py | 24 ++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index a787f8d50d..8dd2d57959 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -466,15 +466,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): 'name': version_number }) + bulk_writes = [] if existing_version is None: self.log.debug("Creating new version ...") - version_id = io.insert_one(version).inserted_id + version["_id"] = ObjectId() + bulk_writes.append(InsertOne(version)) else: self.log.debug("Updating existing version ...") # Check if instance have set `append` mode which cause that # only replicated representations are set to archive append_repres = instance.data.get("append", False) - bulk_writes = [] # Update version data version_id = existing_version['_id'] @@ -484,6 +485,12 @@ class 
IntegrateAssetNew(pyblish.api.InstancePlugin): '$set': version })) + # Instead of directly writing and querying we reproduce what + # the resulting version would look like so we can hold off making + # changes to the database to avoid the need for 'rollback' + version = copy.deepcopy(version) + version["_id"] = existing_version["_id"] + # Find representations of existing version and archive them repres = instance.data.get("representations", []) new_repre_names_low = [_repre["name"].lower() for _repre in repres] @@ -507,13 +514,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre["type"] = "archived_representation" bulk_writes.append(InsertOne(repre)) - # bulk updates - if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write( - bulk_writes - ) - - version = io.find_one({"_id": version_id}) + # bulk updates + # todo: Try to avoid writing already until after we've prepared + # representations to allow easier rollback? + io._database[io.Session["AVALON_PROJECT"]].bulk_write( + bulk_writes + ) self.log.info("Registered version: v{0:03d}".format(version["name"])) From 42175ff6f829ce30ef61538243d7bd4b804c8e28 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 14:41:56 +0100 Subject: [PATCH 039/785] Fix `get_profile_filter_criteria` anatomy data key for app name --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 8dd2d57959..e3dcfcc93c 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -329,7 +329,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return { "families": anatomy_data["family"], "tasks": task.get("name"), - "hosts": anatomy_data["host"], + "hosts": anatomy_data["app"], "task_types": task.get("type") } From 7713af5a1dac4b0080dc6006f08811dcd9fc9d04 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 17:23:02 +0100 Subject: [PATCH 040/785] Fix sequence functionality --- openpype/plugins/publish/integrate_new.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e3dcfcc93c..b5986a62ee 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -645,16 +645,20 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_context = template_filled.used_values self.log.debug("Template filled: {}".format(str(template_filled))) dst_collections, _remainder = clique.assemble( - [os.path.normpath(template_filled)], minimum_items=1 + [os.path.normpath(template_filled)], + minimum_items=1, + patterns=[clique.PATTERNS["frames"]] ) assert not _remainder, "This is a bug" assert len(dst_collections) == 1, "This is a bug" dst_collection = dst_collections[0] # Update the destination indexes and padding - dst_collection.indexes = destination_indexes + dst_collection.indexes.clear() + dst_collection.indexes.update(set(destination_indexes)) dst_collection.padding = destination_padding - assert len(src_collection) == len(dst_collection), "This is a bug" + assert len(src_collection.indexes) == \ + len(dst_collection.indexes), "This is a bug" # Multiple file transfers transfers = [] From 229626bffdbc7e59c2206798b5bb3066a5602228 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 17:36:01 +0100 Subject: [PATCH 041/785] Reformat code --- 
openpype/plugins/publish/integrate_new.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index b5986a62ee..9e3e9de77c 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -657,8 +657,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_collection.indexes.clear() dst_collection.indexes.update(set(destination_indexes)) dst_collection.padding = destination_padding - assert len(src_collection.indexes) == \ - len(dst_collection.indexes), "This is a bug" + assert ( + len(src_collection.indexes) == len(dst_collection.indexes) + ), "This is a bug" # Multiple file transfers transfers = [] From e1eb0887e0bdaaf012e95f289bdaddcf9089d65c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 24 Mar 2022 20:26:10 +0100 Subject: [PATCH 042/785] Reduce database calls for register subset + prepare for bulk writes logic --- openpype/plugins/publish/integrate_new.py | 72 ++++++++++------------- 1 file changed, 31 insertions(+), 41 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 9e3e9de77c..44768df368 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -766,63 +766,53 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_name = instance.data["subset"] self.log.debug("Subset: {}".format(subset_name)) + # Get existing subset if it exists subset = io.find_one({ "type": "subset", "parent": asset["_id"], "name": subset_name }) - families = self._get_instance_families(instance) - if subset is None: - self.log.info("Subset '%s' not found, creating ..." % subset_name) + # Define subset data + data = { + "families": self._get_instance_families(instance) + } - _id = io.insert_one({ + subset_group = instance.data.get("subsetGroup") + if not subset_group: + # todo: move _get_subset_group fallback to its own collector + subset_group = self._get_subset_group(instance) + if subset_group: + data["subsetGroup"] = subset_group + + if subset is None: + # Create a new subset + self.log.info("Subset '%s' not found, creating ..." % subset_name) + subset = { + "_id": ObjectId(), "schema": "openpype:subset-3.0", "type": "subset", "name": subset_name, - "data": { - "families": families - }, + "data": data, "parent": asset["_id"] - }).inserted_id + } + io.insert_one(subset) - subset = io.find_one({"_id": _id}) - - # Update subset group - self._set_subset_group(instance, subset["_id"]) - - # Update families on subset. - io.update_many( - {"type": "subset", "_id": ObjectId(subset["_id"])}, - {"$set": {"data.families": families}} - ) + else: + # Update existing subset data with new data and set in database. + # We also change the found subset in-place so we don't need to + # re-query the subset afterwards + subset["data"].update(data) + io.update_many( + {"type": "subset", "_id": subset["_id"]}, + {"$set": { + "data": subset["data"] + }} + ) self.log.info("Registered subset: {}".format(subset_name)) - return subset - def _set_subset_group(self, instance, subset_id): - """ - Mark subset as belonging to group in DB. 
- - Uses Settings > Global > Publish plugins > IntegrateAssetNew - - Args: - instance (dict): processed instance - subset_id (str): DB's subset _id - - """ - # Fist look into instance data - subset_group = instance.data.get("subsetGroup") - if not subset_group: - subset_group = self._get_subset_group(instance) - - if subset_group: - io.update_many({ - 'type': 'subset', - '_id': ObjectId(subset_id) - }, {'$set': {'data.subsetGroup': subset_group}}) - def _get_subset_group(self, instance): """Look into subset group profiles set by settings. From b906365f593025bf7bbba67ea6d8a907b717c98e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 25 Mar 2022 21:42:39 +0100 Subject: [PATCH 043/785] Separate site sync logic further from Integrator plug-in (Draft) --- openpype/plugins/publish/integrate_new.py | 154 ++++++++++++---------- 1 file changed, 88 insertions(+), 66 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 44768df368..138a4fcc06 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -419,7 +419,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources self.log.debug("Retrieving Representation files information ...") - sites = self.compute_resource_sync_sites(instance) + sites = SiteSync.compute_resource_sync_sites( + system_settings=instance.context.data["system_settings"], + project_settings=instance.context.data["project_settings"] + ) + log.debug("final sites:: {}".format(sites)) + anatomy = instance.context.data["anatomy"] representations = [] for prepared in prepared_representations: @@ -987,63 +992,65 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "sites": sites } - # region sync sites - def compute_resource_sync_sites(self, instance): + +class SiteSync(object): + """Logic for Site Sync Module functionality""" + + @classmethod + def compute_resource_sync_sites(cls, + system_settings, + project_settings): """Get available resource sync sites""" - # Sync server logic - # TODO: Clean up sync settings - local_site = 'studio' # default - remote_site = None - always_accessible = [] - sync_project_presets = None - system_sync_server_presets = ( - instance.context.data["system_settings"] - ["modules"] - ["sync_server"]) + def create_metadata(name, created=True): + """Create sync site metadata for site with `name`""" + metadata = {"name": name} + if created: + metadata["created_dt"] = datetime.now() + return metadata + + default_sites = [create_metadata("studio")] + + # If sync site module is disabled return default fallback site + system_sync_server_presets = system_settings["modules"]["sync_server"] log.debug("system_sett:: {}".format(system_sync_server_presets)) + if not system_sync_server_presets["enabled"]: + return default_sites - if system_sync_server_presets["enabled"]: - sync_project_presets = ( - instance.context.data["project_settings"] - ["global"] - ["sync_server"]) + # If sync site module is disabled in current + # project return default fallback site + sync_project_presets = project_settings["global"]["sync_server"] + if not sync_project_presets["enabled"]: + return default_sites - if sync_project_presets and sync_project_presets["enabled"]: - local_site, remote_site = self._get_sites(sync_project_presets) - always_accessible = sync_project_presets["config"]. 
\ - get("always_accessible_on", []) + local_site, remote_site = cls._get_sites(sync_project_presets) - already_attached_sites = {} - meta = {"name": local_site, "created_dt": datetime.now()} - sites = [meta] - already_attached_sites[meta["name"]] = meta["created_dt"] + # Attached sites metadata by site name + # That is the local site, remote site, the always accesible sites + # and their alternate sites (alias of sites with different protocol) + attached_sites = dict() + attached_sites[local_site] = create_metadata(local_site) - if sync_project_presets and sync_project_presets["enabled"]: - if remote_site and \ - remote_site not in already_attached_sites.keys(): - # add remote - meta = {"name": remote_site.strip()} - sites.append(meta) - already_attached_sites[meta["name"]] = None + if remote_site and remote_site != local_site: + attached_sites[remote_site] = create_metadata(remote_site, + created=False) - # add skeleton for site where it should be always synced to - for always_on_site in always_accessible: - if always_on_site not in already_attached_sites.keys(): - meta = {"name": always_on_site.strip()} - sites.append(meta) - already_attached_sites[meta["name"]] = None + # add skeleton for sites where it should be always synced to + always_accessible_sites = ( + sync_project_presets["config"].get("always_accessible_on", []) + ) + for site in always_accessible_sites: + site = site.strip() + if site not in attached_sites: + attached_sites[site] = create_metadata(site, created=False) - # add alternative sites - alt = self._add_alternative_sites(system_sync_server_presets, - already_attached_sites) - sites.extend(alt) + # add alternative sites + cls._add_alternative_sites(system_sync_server_presets, attached_sites) - log.debug("final sites:: {}".format(sites)) + return list(attached_sites.values()) - return sites - - def _get_sites(self, sync_project_presets): + @staticmethod + def _get_sites(sync_project_presets): """Returns tuple (local_site, remote_site)""" local_site_id = openpype.api.get_local_site_id() local_site = sync_project_presets["config"]. \ @@ -1053,36 +1060,51 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): local_site = local_site_id remote_site = sync_project_presets["config"].get("remote_site") + if remote_site: + remote_site.strip() if remote_site == 'local': remote_site = local_site_id return local_site, remote_site - def _add_alternative_sites(self, - system_sync_server_presets, - already_attached_sites): + @staticmethod + def _add_alternative_sites(system_sync_server_presets, + attached_sites): """Loop through all configured sites and add alternatives. + For all sites if an alternative site is detected that has an + accessible site then we can also register to that alternative site + with the same "created" state. So we match the existing data. 
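
For illustration, a minimal sketch of the site metadata these SiteSync helpers assemble (only "studio" is a real default; the other site names and the "alternative_sites" configuration are hypothetical):

    from datetime import datetime

    # Local site gets a full entry marked as already present
    attached_sites = {
        "studio": {"name": "studio", "created_dt": datetime.now()},
        # Hypothetical remote site: skeleton entry only (created=False)
        "sftp": {"name": "sftp"},
    }
    # A hypothetical "sftp_weekly" site configured with
    # "alternative_sites": ["sftp"] copies the state of the site it
    # aliases, so it also ends up without a "created_dt"
    attached_sites["sftp_weekly"] = {"name": "sftp_weekly"}

    # compute_resource_sync_sites() then returns the collected values
    sites = list(attached_sites.values())
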
+ See SyncServerModule.handle_alternate_site """ conf_sites = system_sync_server_presets.get("sites", {}) - alternative_sites = [] for site_name, site_info in conf_sites.items(): - alt_sites = set(site_info.get("alternative_sites", [])) - already_attached_keys = list(already_attached_sites.keys()) - for added_site in already_attached_keys: - if added_site in alt_sites: - if site_name in already_attached_keys: - continue - meta = {"name": site_name} - real_created = already_attached_sites[added_site] - # alt site inherits state of 'created_dt' - if real_created: - meta["created_dt"] = real_created - alternative_sites.append(meta) - already_attached_sites[meta["name"]] = real_created - return alternative_sites - # endregion + # Skip if already defined + if site_name in attached_sites: + continue + + # Get alternate sites (stripped names) for this site name + alt_sites = site_info.get("alternative_sites", []) + alt_sites = [site.strip() for site in alt_sites] + alt_sites = set(alt_sites) + + # If no alternative sites we don't need to add + if not alt_sites: + continue + + # Take a copy of data of the first alternate site that is already + # defined as an attached site to match the same state. + match_meta = next((attached_sites[site] for site in alt_sites + if site in attached_sites), None) + if not match_meta: + continue + + alt_site_meta = copy.deepcopy(match_meta) + alt_site_meta["name"] = site_name + + # Note: We change mutable `attached_site` dict in-place + attached_sites[site_name] = alt_site_meta From e0aaa5f6cc2fd2a2e6fa708364136d9d6235163d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:20:13 +0100 Subject: [PATCH 044/785] Move FileTransaction into lib --- openpype/lib/file_transaction.py | 171 ++++++++++++++++++++++ openpype/plugins/publish/integrate_new.py | 167 +-------------------- 2 files changed, 172 insertions(+), 166 deletions(-) create mode 100644 openpype/lib/file_transaction.py diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py new file mode 100644 index 0000000000..57592e297f --- /dev/null +++ b/openpype/lib/file_transaction.py @@ -0,0 +1,171 @@ +import os +import logging +import sys +import errno +import six + +from openpype.lib import create_hard_link + +# this is needed until speedcopy for linux is fixed +if sys.platform == "win32": + from speedcopy import copyfile +else: + from shutil import copyfile + + +class FileTransaction(object): + """ + + The file transaction is a three step process. + + 1) Rename any existing files to a "temporary backup" during `process()` + 2) Copy the files to final destination during `process()` + 3) Remove any backed up files (*no rollback possible!) during `finalize()` + + Step 3 is done during `finalize()`. If not called the .bak files will + remain on disk. + + These steps try to ensure that we don't overwrite half of any existing + files e.g. if they are currently in use. + + Note: + A regular filesystem is *not* a transactional file system and even + though this implementation tries to produce a 'safe copy' with a + potential rollback do keep in mind that it's inherently unsafe due + to how filesystem works and a myriad of things could happen during + the transaction that break the logic. A file storage could go down, + permissions could be changed, other machines could be moving or writing + files. A lot can happen. + + Warning: + Any folders created during the transfer will not be removed. 
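
For illustration, a minimal usage sketch of the three-step flow described above; the paths are hypothetical and the try/except pattern mirrors how the integrator drives this class elsewhere in this patch series:

    import logging

    from openpype.lib.file_transaction import FileTransaction

    file_transactions = FileTransaction(log=logging.getLogger("publish"))
    file_transactions.add("/tmp/render.0001.exr",
                          "/projects/shot/publish/render.0001.exr")
    file_transactions.add("/tmp/render.abc",
                          "/projects/shot/publish/render.abc",
                          mode=FileTransaction.MODE_HARDLINK)
    try:
        # Steps 1 and 2: back up any existing destinations, then copy/hardlink
        file_transactions.process()
    except Exception:
        # Restore the backups and remove anything already transferred
        file_transactions.rollback()
        raise
    # Step 3: remove the temporary .bak files (no rollback possible after this)
    file_transactions.finalize()
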
+ + """ + + MODE_COPY = 0 + MODE_HARDLINK = 1 + + def __init__(self, log=None): + + if log is None: + log = logging.getLogger("FileTransaction") + + self.log = log + + # The transfer queue + # todo: make this an actual FIFO queue? + self._transfers = {} + + # Destination file paths that a file was transferred to + self._transferred = [] + + # Backup file location mapping to original locations + self._backup_to_original = {} + + def add(self, src, dst, mode=MODE_COPY): + """Add a new file to transfer queue""" + opts = {"mode": mode} + + src = os.path.normpath(src) + dst = os.path.normpath(dst) + + if dst in self._transfers: + queued_src = self._transfers[dst][0] + if src == queued_src: + self.log.debug("File transfer was already " + "in queue: {} -> {}".format(src, dst)) + return + else: + self.log.warning("File transfer in queue replaced..") + self.log.debug("Removed from queue: " + "{} -> {}".format(queued_src, dst)) + self.log.debug("Added to queue: {} -> {}".format(src, dst)) + + self._transfers[dst] = (src, opts) + + def process(self): + + # Backup any existing files + for dst in self._transfers.keys(): + if os.path.exists(dst): + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug("Backup existing file: " + "{} -> {}".format(dst, backup)) + os.rename(dst, backup) + + # Copy the files to transfer + for dst, (src, opts) in self._transfers.items(): + self._create_folder_for_file(dst) + + if opts["mode"] == self.MODE_COPY: + self.log.debug("Copying file ... {} -> {}".format(src, dst)) + copyfile(src, dst) + elif opts["mode"] == self.MODE_HARDLINK: + self.log.debug("Hardlinking file ... {} -> {}".format(src, + dst)) + create_hard_link(src, dst) + + self._transferred.append(dst) + + def finalize(self): + # Delete any backed up files + for backup in self._backup_to_original.keys(): + try: + os.remove(backup) + except OSError: + self.log.error("Failed to remove backup file: " + "{}".format(backup), + exc_info=True) + + def rollback(self): + + errors = 0 + + # Rollback any transferred files + for path in self._transferred: + try: + os.remove(path) + except OSError: + errors += 1 + self.log.error("Failed to rollback created file: " + "{}".format(path), + exc_info=True) + + # Rollback the backups + for backup, original in self._backup_to_original.items(): + try: + os.rename(backup, original) + except OSError: + errors += 1 + self.log.error("Failed to restore original file: " + "{} -> {}".format(backup, original), + exc_info=True) + + if errors: + self.log.error("{} errors occurred during " + "rollback.".format(errors), exc_info=True) + six.reraise(*sys.exc_info()) + + @property + def transferred(self): + """Return the processed transfers destination paths""" + return list(self._transferred) + + @property + def backups(self): + """Return the backup file paths""" + return list(self._backup_to_original.keys()) + + def _create_folder_for_file(self, path): + dirname = os.path.dirname(path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 138a4fcc06..92976e6151 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -3,7 +3,6 @@ import logging import sys import copy import clique -import errno import six from 
bson.objectid import ObjectId @@ -13,19 +12,13 @@ from avalon import io import openpype.api from datetime import datetime from openpype.lib.profiles_filtering import filter_profiles +from openpype.lib.file_transaction import FileTransaction from openpype.lib import ( prepare_template_data, - create_hard_link, StringTemplate, TemplateUnsolved ) -# this is needed until speedcopy for linux is fixed -if sys.platform == "win32": - from speedcopy import copyfile -else: - from shutil import copyfile - log = logging.getLogger(__name__) @@ -40,164 +33,6 @@ def get_first_frame_padded(collection): return get_frame_padded(start_frame, padding=collection.padding) -class FileTransaction(object): - """ - - The file transaction is a three step process. - - 1) Rename any existing files to a "temporary backup" during `process()` - 2) Copy the files to final destination during `process()` - 3) Remove any backed up files (*no rollback possible!) during `finalize()` - - Step 3 is done during `finalize()`. If not called the .bak files will - remain on disk. - - These steps try to ensure that we don't overwrite half of any existing - files e.g. if they are currently in use. - - Note: - A regular filesystem is *not* a transactional file system and even - though this implementation tries to produce a 'safe copy' with a - potential rollback do keep in mind that it's inherently unsafe due - to how filesystem works and a myriad of things could happen during - the transaction that break the logic. A file storage could go down, - permissions could be changed, other machines could be moving or writing - files. A lot can happen. - - Warning: - Any folders created during the transfer will not be removed. - - """ - - MODE_COPY = 0 - MODE_HARDLINK = 1 - - def __init__(self, log=None): - - if log is None: - log = logging.getLogger("FileTransaction") - - self.log = log - - # The transfer queue - # todo: make this an actual FIFO queue? - self._transfers = {} - - # Destination file paths that a file was transferred to - self._transferred = [] - - # Backup file location mapping to original locations - self._backup_to_original = {} - - def add(self, src, dst, mode=MODE_COPY): - """Add a new file to transfer queue""" - opts = {"mode": mode} - - src = os.path.normpath(src) - dst = os.path.normpath(dst) - - if dst in self._transfers: - queued_src = self._transfers[dst][0] - if src == queued_src: - self.log.debug("File transfer was already " - "in queue: {} -> {}".format(src, dst)) - return - else: - self.log.warning("File transfer in queue replaced..") - self.log.debug("Removed from queue: " - "{} -> {}".format(queued_src, dst)) - self.log.debug("Added to queue: {} -> {}".format(src, dst)) - - self._transfers[dst] = (src, opts) - - def process(self): - - # Backup any existing files - for dst in self._transfers.keys(): - if os.path.exists(dst): - # Backup original file - # todo: add timestamp or uuid to ensure unique - backup = dst + ".bak" - self._backup_to_original[backup] = dst - self.log.debug("Backup existing file: " - "{} -> {}".format(dst, backup)) - os.rename(dst, backup) - - # Copy the files to transfer - for dst, (src, opts) in self._transfers.items(): - self._create_folder_for_file(dst) - - if opts["mode"] == self.MODE_COPY: - self.log.debug("Copying file ... {} -> {}".format(src, dst)) - copyfile(src, dst) - elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... 
{} -> {}".format(src, - dst)) - create_hard_link(src, dst) - - self._transferred.append(dst) - - def finalize(self): - # Delete any backed up files - for backup in self._backup_to_original.keys(): - try: - os.remove(backup) - except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) - - def rollback(self): - - errors = 0 - - # Rollback any transferred files - for path in self._transferred: - try: - os.remove(path) - except OSError: - errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) - - # Rollback the backups - for backup, original in self._backup_to_original.items(): - try: - os.rename(backup, original) - except OSError: - errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) - - if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) - six.reraise(*sys.exc_info()) - - @property - def transferred(self): - """Return the processed transfers destination paths""" - return list(self._transferred) - - @property - def backups(self): - """Return the backup file paths""" - return list(self._backup_to_original.keys()) - - def _create_folder_for_file(self, path): - dirname = os.path.dirname(path) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - six.reraise(*sys.exc_info()) - - class IntegrateAssetNew(pyblish.api.InstancePlugin): """Resolve any dependency issues From d3cb32ebe1df79408ff03fddef4d74a55fa1f4b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:32:34 +0100 Subject: [PATCH 045/785] Collect subset group in a Collector instead of during Integrator --- .../plugins/publish/collect_subset_group.py | 100 ++++++++++++++++++ openpype/plugins/publish/integrate_new.py | 50 --------- 2 files changed, 100 insertions(+), 50 deletions(-) create mode 100644 openpype/plugins/publish/collect_subset_group.py diff --git a/openpype/plugins/publish/collect_subset_group.py b/openpype/plugins/publish/collect_subset_group.py new file mode 100644 index 0000000000..60c1c04e70 --- /dev/null +++ b/openpype/plugins/publish/collect_subset_group.py @@ -0,0 +1,100 @@ +"""Produces instance.data["subsetGroup"] data used during integration. + +Requires: + dict -> context["anatomyData"] *(pyblish.api.CollectorOrder + 0.49) + +Provides: + instance -> subsetGroup (str) + +""" +import pyblish.api + +from openpype.lib.profiles_filtering import filter_profiles +from openpype.lib import ( + prepare_template_data, + StringTemplate, + TemplateUnsolved +) + + +class CollectSubsetGroup(pyblish.api.ContextPlugin): + """Collect Subset Group for publish.""" + + # Run after CollectAnatomyInstanceData + order = pyblish.api.CollectorOrder + 0.495 + label = "Collect Subset Group" + + def process(self, instance): + """Look into subset group profiles set by settings. + + Attribute 'subset_grouping_profiles' is defined by OpenPype settings. 
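
For illustration, a minimal sketch of how a grouping profile resolves into instance.data["subsetGroup"]; the profile values and filter criteria below are hypothetical, while filter_profiles, prepare_template_data and StringTemplate are the helpers this collector imports:

    subset_grouping_profiles = [{
        "families": ["render"],
        "hosts": ["maya"],
        "task_types": [],   # empty lists are assumed to mean "match any"
        "tasks": [],
        "template": "{family}{task}",
    }]
    filter_criteria = {
        "families": "render",
        "tasks": "lighting",
        "hosts": "maya",
        "task_types": "Lighting",
    }
    profile = filter_profiles(subset_grouping_profiles, filter_criteria)

    # Build fill data for the template placeholders and format strictly
    fill_pairs = prepare_template_data({
        "family": filter_criteria["families"],
        "task": filter_criteria["tasks"],
        "host": filter_criteria["hosts"],
        "subset": "renderLightingMain",
        "renderlayer": None,  # may be missing outside renderlayer workflows
    })
    subset_group = StringTemplate.format_strict_template(
        profile["template"], fill_pairs
    )
    # e.g. "renderlighting", which ends up in instance.data["subsetGroup"]
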
+ """ + + # TODO: Move this setting to this Collector instead of Integrator + project_settings = instance.context.data["project_settings"] + subset_grouping_profiles = ( + project_settings["global"] + ["publish"] + ["IntegrateAssetNew"] + ["subset_grouping_profiles"] + ) + + # Skip if 'subset_grouping_profiles' is empty + if not subset_grouping_profiles: + return + + # Skip if there is no matching profile + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(subset_grouping_profiles, + filter_criteria, + logger=self.log) + if not profile: + return + + if instance.data.get("subsetGroup"): + # If subsetGroup is already set then allow that value to remain + self.log.debug("Skipping collect subset group due to existing " + "value: {}".format(instance.data["subsetGroup"])) + return + + template = profile["template"] + + fill_pairs = prepare_template_data({ + "family": filter_criteria["families"], + "task": filter_criteria["tasks"], + "host": filter_criteria["hosts"], + "subset": instance.data["subset"], + "renderlayer": instance.data.get("renderlayer") + }) + + filled_template = None + try: + filled_template = StringTemplate.format_strict_template( + template, fill_pairs + ) + except (KeyError, TemplateUnsolved): + keys = fill_pairs.keys() + msg = "Subset grouping failed. " \ + "Only {} are expected in Settings".format(','.join(keys)) + self.log.warning(msg) + + if filled_template: + instance.data["subsetGroup"] = filled_template + + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + # TODO: This logic is used in much more plug-ins in one way or another + # Maybe better suited for lib? + # Anatomy data is pre-filled by Collectors + anatomy_data = instance.data["anatomyData"] + + # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + # Return filter criteria + return { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "hosts": anatomy_data["app"], + "task_types": task.get("type") + } diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 92976e6151..284e110916 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -13,11 +13,6 @@ import openpype.api from datetime import datetime from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction -from openpype.lib import ( - prepare_template_data, - StringTemplate, - TemplateUnsolved -) log = logging.getLogger(__name__) @@ -619,9 +614,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } subset_group = instance.data.get("subsetGroup") - if not subset_group: - # todo: move _get_subset_group fallback to its own collector - subset_group = self._get_subset_group(instance) if subset_group: data["subsetGroup"] = subset_group @@ -653,48 +645,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.info("Registered subset: {}".format(subset_name)) return subset - def _get_subset_group(self, instance): - """Look into subset group profiles set by settings. - - Attribute 'subset_grouping_profiles' is defined by OpenPype settings. 
- """ - # TODO: This logic is better suited for a Collector to just store - # instance.data["subsetGroup"] - # Skip if 'subset_grouping_profiles' is empty - if not self.subset_grouping_profiles: - return None - - # Skip if there is no matching profile - filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.subset_grouping_profiles, - filter_criteria, - logger=self.log) - if not profile: - return None - - template = profile["template"] - - fill_pairs = prepare_template_data({ - "family": filter_criteria["families"], - "task": filter_criteria["tasks"], - "host": filter_criteria["hosts"], - "subset": instance.data["subset"], - "renderlayer": instance.data.get("renderlayer") - }) - - filled_template = None - try: - filled_template = StringTemplate.format_strict_template( - template, fill_pairs - ) - except (KeyError, TemplateUnsolved): - keys = fill_pairs.keys() - msg = "Subset grouping failed. " \ - "Only {} are expected in Settings".format(','.join(keys)) - self.log.warning(msg) - - return filled_template - def create_version_data(self, instance): """Create the data collection for the version From d7c5ad1f7c9913a39b43087cebbbee7971844f8c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:33:37 +0100 Subject: [PATCH 046/785] Remove duplicate "source" in families --- openpype/plugins/publish/integrate_new.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 284e110916..08088479d0 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -86,7 +86,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source", "matchmove", "image", - "source", "assembly", "fbx", "textures", From 8fffc60b5016d63d6fad2b8c3b399537a3736171 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:37:23 +0100 Subject: [PATCH 047/785] Move remainder of prepare anatomy data to the Collector --- .../plugins/publish/collect_anatomy_context_data.py | 6 ++++++ openpype/plugins/publish/integrate_new.py | 13 ------------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index bd8d9e50c4..346caf6b83 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -91,5 +91,11 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): } }) + intent = context.data.get("intent") + if intent and isinstance(intent, dict): + intent_value = intent.get("value") + if intent_value: + context_data["intent"] = intent_value + self.log.info("Global anatomy Data collected") self.log.debug(json.dumps(context_data, indent=4)) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 08088479d0..f598c540e5 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -119,10 +119,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if families & set(self.exclude_families): return - # TODO: Avoid the need to do any adjustments to anatomy data - # Best case scenario that's all handled by collectors - self.prepare_anatomy(instance) - file_transactions = FileTransaction(log=self.log) try: self.register(instance, file_transactions) @@ -137,15 +133,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # the try, except. 
file_transactions.finalize() - def prepare_anatomy(self, instance): - """Prepare anatomy data used to define representation destinations""" - # TODO: This logic should move to CollectAnatomyContextData - intent_value = instance.context.data.get("intent") - if intent_value and isinstance(intent_value, dict): - intent_value = intent_value.get("value") - if intent_value: - instance.data["anatomyData"]["intent"] = intent_value - def get_profile_filter_criteria(self, instance): """Return filter criteria for `filter_profiles`""" # Anatomy data is pre-filled by Collectors From 177e83ec8bf55e28ca551affefc4ac775570fe98 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:43:00 +0100 Subject: [PATCH 048/785] Restore "published_path" backwards compatibility for IntegrateFtrackInstance on Farm --- openpype/plugins/publish/integrate_new.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index f598c540e5..05cbb357e3 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -532,6 +532,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Store first transferred destination as published path data # todo: can we remove this? published_path = transfers[0][1] + repre["published_path"] = published_path # Backwards compatibility # todo: `repre` is not the actual `representation` entity # we should simplify/clarify difference between data above From 7189954a3c29ca00139a9a50b58606a3c335de04 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 14:44:19 +0100 Subject: [PATCH 049/785] Use `os.path.abspath` instead of `os.path.normpath` when adding a transfer --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 57592e297f..1626bec6b6 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -66,8 +66,8 @@ class FileTransaction(object): """Add a new file to transfer queue""" opts = {"mode": mode} - src = os.path.normpath(src) - dst = os.path.normpath(dst) + src = os.path.abspath(src) + dst = os.path.abspath(dst) if dst in self._transfers: queued_src = self._transfers[dst][0] From 8f8b578f0ce660b1c8182ad2486aca21ed1828e2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 19:58:55 +0100 Subject: [PATCH 050/785] Move Subset Grouping Profiles settings to Collect Subset Group - This is moved from the Integrate Asset New settings --- .../plugins/publish/collect_subset_group.py | 16 +-- openpype/plugins/publish/integrate_new.py | 1 - .../defaults/project_settings/global.json | 20 ++-- .../schemas/schema_global_publish.json | 101 ++++++++++-------- 4 files changed, 71 insertions(+), 67 deletions(-) diff --git a/openpype/plugins/publish/collect_subset_group.py b/openpype/plugins/publish/collect_subset_group.py index 60c1c04e70..075699e304 100644 --- a/openpype/plugins/publish/collect_subset_group.py +++ b/openpype/plugins/publish/collect_subset_group.py @@ -24,28 +24,22 @@ class CollectSubsetGroup(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.495 label = "Collect Subset Group" + # Defined in OpenPype settings + subset_grouping_profiles = None + def process(self, instance): """Look into subset group profiles set by settings. Attribute 'subset_grouping_profiles' is defined by OpenPype settings. 
""" - # TODO: Move this setting to this Collector instead of Integrator - project_settings = instance.context.data["project_settings"] - subset_grouping_profiles = ( - project_settings["global"] - ["publish"] - ["IntegrateAssetNew"] - ["subset_grouping_profiles"] - ) - # Skip if 'subset_grouping_profiles' is empty - if not subset_grouping_profiles: + if not self.subset_grouping_profiles: return # Skip if there is no matching profile filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(subset_grouping_profiles, + profile = filter_profiles(self.subset_grouping_profiles, filter_criteria, logger=self.log) if not profile: diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 05cbb357e3..4706d4d093 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -110,7 +110,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Attributes set by settings template_name_profiles = None - subset_grouping_profiles = None def process(self, instance): diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 30a71b044a..528df111f0 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -20,6 +20,17 @@ ], "skip_hosts_headless_publish": [] }, + "CollectSubsetGroup": { + "subset_grouping_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template": "" + } + ] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false @@ -193,15 +204,6 @@ "tasks": [], "template_name": "render" } - ], - "subset_grouping_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "" - } ] }, "CleanUp": { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 12043d4205..ab968037f6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -39,6 +39,61 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectSubsetGroup", + "label": "Collect Subset Group", + "is_group": true, + "children": [ + { + "type": "list", + "key": "subset_grouping_profiles", + "label": "Subset grouping profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template", + "label": "Template" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, @@ -603,52 +658,6 @@ } ] } - }, - { - "type": "list", - "key": "subset_grouping_profiles", - "label": "Subset grouping profiles", - "use_label_wrap": true, - "object_type": { - "type": "dict", - "children": [ - { - "type": "label", - "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" - }, - { - "key": "families", - "label": "Families", - "type": "list", - "object_type": "text" - }, - { - "type": "hosts-enum", - "key": "hosts", - "label": "Hosts", - "multiselection": true - }, - { - "key": "task_types", - "label": "Task types", - "type": "task-types-enum" - }, - { - "key": "tasks", - "label": "Task names", - "type": "list", - "object_type": "text" - }, - { - "type": "separator" - }, - { - "type": "text", - "key": "template", - "label": "Template" - } - ] - } } ] }, From 6ff7167d54e8a70441300ba4d21acb5a01eb5071 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 20:09:08 +0100 Subject: [PATCH 051/785] Separate get_template_name into its own method + use `self.default_template_name` --- openpype/plugins/publish/integrate_new.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 4706d4d093..c1fa7ccaf2 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -172,14 +172,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) ) - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.template_name_profiles, - filter_criteria, - logger=self.log) - template_name = "publish" - if profile: - template_name = profile["template_name"] + template_name = self._get_template_name(instance) subset = self.register_subset(instance) @@ -582,6 +575,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return families + def _get_template_name(self, instance): + """Return anatomy template name to use for integration""" + + # Define publish template name from profiles + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(self.template_name_profiles, + filter_criteria, + logger=self.log) + template_name = self.default_template_name + if profile: + template_name = profile["template_name"] + return template_name + def register_subset(self, instance): asset = instance.data.get("assetEntity") subset_name = instance.data["subset"] From 821293d3b855acf2cadd914328a975fd619acd56 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 20:09:31 +0100 Subject: [PATCH 052/785] Match comment from Integrator for consistency --- openpype/plugins/publish/collect_subset_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_subset_group.py b/openpype/plugins/publish/collect_subset_group.py index 075699e304..5756563ed3 100644 --- a/openpype/plugins/publish/collect_subset_group.py +++ b/openpype/plugins/publish/collect_subset_group.py @@ -24,7 +24,7 @@ class CollectSubsetGroup(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.495 label = "Collect Subset Group" - # Defined in OpenPype settings + # Attributes set by settings subset_grouping_profiles = None def process(self, instance): From c3e0162c436a081ccf809cd429f5b828202569d0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 20:11:31 +0100 Subject: [PATCH 053/785] Debug log when exclude family was found for the instance --- openpype/plugins/publish/integrate_new.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index c1fa7ccaf2..8a71c0d5aa 100644 
--- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -115,7 +115,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Exclude instances that also contain families from exclude families families = set(self._get_instance_families(instance)) - if families & set(self.exclude_families): + exclude = families & set(self.exclude_families) + if exclude: + self.log.debug("Instance not integrated due to exclude " + "families found: {}".format(", ".join(exclude))) return file_transactions = FileTransaction(log=self.log) From fbdb385e5b855c0762583256311501b78a2ca730 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 20:20:00 +0100 Subject: [PATCH 054/785] Perform database registering of Subset and Version in a single Bulk Write --- openpype/plugins/publish/integrate_new.py | 30 +++++++++++------------ 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 8a71c0d5aa..6f1d745b9a 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -177,11 +177,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_name = self._get_template_name(instance) - subset = self.register_subset(instance) - - version = self.register_version(instance, subset) + subset, subset_writes = self.register_subset(instance) + version, version_writes = self.register_version(instance, subset) instance.data["versionEntity"] = version + # Bulk write to the database + # todo: Try to avoid writing already until after we've prepared + # representations to allow easier rollback? + io._database[io.Session["AVALON_PROJECT"]].bulk_write( + subset_writes + version_writes + ) + archived_repres = list(io.find({ "parent": version["_id"], "type": "archived_representation" @@ -330,16 +336,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre["type"] = "archived_representation" bulk_writes.append(InsertOne(repre)) - # bulk updates - # todo: Try to avoid writing already until after we've prepared - # representations to allow easier rollback? - io._database[io.Session["AVALON_PROJECT"]].bulk_write( - bulk_writes - ) - self.log.info("Registered version: v{0:03d}".format(version["name"])) - return version + return version, bulk_writes def prepare_representation(self, repre, template_name, @@ -612,6 +611,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if subset_group: data["subsetGroup"] = subset_group + bulk_writes = [] if subset is None: # Create a new subset self.log.info("Subset '%s' not found, creating ..." % subset_name) @@ -623,22 +623,22 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "data": data, "parent": asset["_id"] } - io.insert_one(subset) + bulk_writes.append(InsertOne(subset)) else: # Update existing subset data with new data and set in database. 
# We also change the found subset in-place so we don't need to # re-query the subset afterwards subset["data"].update(data) - io.update_many( + bulk_writes.append(UpdateOne( {"type": "subset", "_id": subset["_id"]}, {"$set": { "data": subset["data"] }} - ) + )) self.log.info("Registered subset: {}".format(subset_name)) - return subset + return subset, bulk_writes def create_version_data(self, instance): """Create the data collection for the version From 1844281c68d0e357eccdc8c277db278ef0651f31 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 26 Mar 2022 20:41:22 +0100 Subject: [PATCH 055/785] Match assertion for collection of files (allow no absolute paths) similar to single files --- openpype/plugins/publish/integrate_new.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 6f1d745b9a..ead00452da 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -398,6 +398,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: # Collection of files (sequence) + assert not any(os.path.isabs(fname) for fname in files), ( + "Given file names contain full paths" + ) + # Get the sequence as a collection. The files must be of a single # sequence and have no remainder outside of the collections. collections, remainder = clique.assemble(files, From 8e0161bec7353bff8bc581d4d676b3ba7c090ba8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Mar 2022 15:04:24 +0200 Subject: [PATCH 056/785] Also Bulk Write representation changes + more cleanup - Don't create intermediate archived representations - Move writing of Subset + Version to database to just before file transactions - Perform ReplaceOne for version instead of update with "$set" for the full version --- openpype/plugins/publish/integrate_new.py | 166 ++++++++++------------ 1 file changed, 79 insertions(+), 87 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index ead00452da..7a3ca2bdf7 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -6,7 +6,7 @@ import clique import six from bson.objectid import ObjectId -from pymongo import DeleteOne, InsertOne, UpdateOne +from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api from avalon import io import openpype.api @@ -28,6 +28,11 @@ def get_first_frame_padded(collection): return get_frame_padded(start_frame, padding=collection.padding) +def bulk_write(writes): + """Convenience function to bulk write into active project database""" + return io._database[io.Session["AVALON_PROJECT"]].bulk_write(writes) + + class IntegrateAssetNew(pyblish.api.InstancePlugin): """Resolve any dependency issues @@ -177,21 +182,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_name = self._get_template_name(instance) - subset, subset_writes = self.register_subset(instance) - version, version_writes = self.register_version(instance, subset) + subset, subset_writes = self.prepare_subset(instance) + version, version_writes = self.prepare_version(instance, subset) instance.data["versionEntity"] = version - # Bulk write to the database - # todo: Try to avoid writing already until after we've prepared - # representations to allow easier rollback? 
- io._database[io.Session["AVALON_PROJECT"]].bulk_write( - subset_writes + version_writes - ) - - archived_repres = list(io.find({ - "parent": version["_id"], - "type": "archived_representation" - })) + # Get existing representations (if any) + existing_repres_by_name = { + repres["name"].lower(): repres for repres in io.find({ + "parent": version["_id"], + "type": "representation" + }) + } # Prepare all representations prepared_representations = [] @@ -205,7 +206,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # todo: reduce/simplify what is returned from this function prepared = self.prepare_representation(repre, template_name, - archived_repres, + existing_repres_by_name, version, instance_stagingdir, instance) @@ -225,40 +226,70 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for src, dst in instance.data.get("hardlinks", []): file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) + # Bulk write to the database + # todo: Can we move this even to after the file transfers? + bulk_write(subset_writes + version_writes) + self.log.info("Subset {subset[name]} and Version {version[name]} " + "written to database..".format(subset=subset, + version=version)) + # Process all file transfers of all integrations now self.log.debug("Integrating source files to destination ...") file_transactions.process() - self.log.debug("Backup files " + self.log.debug("Backed up existing files: " "{}".format(file_transactions.backups)) - self.log.debug("Integrated files " + self.log.debug("Transferred files: " "{}".format(file_transactions.transferred)) # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources - self.log.debug("Retrieving Representation files information ...") + self.log.debug("Retrieving Representation Site Sync information ...") sites = SiteSync.compute_resource_sync_sites( system_settings=instance.context.data["system_settings"], project_settings=instance.context.data["project_settings"] ) - log.debug("final sites:: {}".format(sites)) + self.log.debug("final sites:: {}".format(sites)) anatomy = instance.context.data["anatomy"] - representations = [] + representation_writes = [] + new_repre_names_low = set() for prepared in prepared_representations: transfers = prepared["transfers"] representation = prepared["representation"] representation["files"] = self.get_files_info( transfers, sites, anatomy ) - representations.append(representation) - # Remove all archived representations - if archived_repres: - repre_ids_to_remove = [repre["_id"] for repre in archived_repres] - io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + # Set up representation for writing to the database. 
Since + # we *might* be overwriting an existing entry if the version + # already existed we'll use ReplaceOnce with `upsert=True` + representation_writes.append(ReplaceOne( + filter={"_id": representation["_id"]}, + replacement=representation, + upsert=True + )) - # Write the new representations to the database - io.insert_many(representations) + new_repre_names_low.add(representation["name"].lower()) + + # Delete any existing representations that didn't get any new data + # if the instance is not set to append mode + if not instance.data.get("append", False): + delete_names = set() + for name, existing_repres in existing_repres_by_name.items(): + if name not in new_repre_names_low: + # We add the exact representation name because `name` is + # lowercase for name matching only and not in the database + delete_names.add(existing_repres["name"]) + if delete_names: + representation_writes.append(DeleteMany( + filter={ + "parent": version["_id"], + "name": {"$in": list(delete_names)} + } + )) + + # Write representations to the database + bulk_write(representation_writes) # Backwards compatibility # todo: can we avoid the need to store this? @@ -267,12 +298,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } self.log.info("Registered {} representations" - "".format(len(representations))) + "".format(len(prepared_representations))) - def register_version(self, instance, subset): + def prepare_version(self, instance, subset): version_number = instance.data["version"] - self.log.debug("Version: v{0:03d}".format(version_number)) version = { "schema": "openpype:version-3.0", @@ -288,61 +318,26 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): 'name': version_number }) - bulk_writes = [] - if existing_version is None: + if existing_version: + self.log.debug("Updating existing version ...") + version["_id"] = existing_version["_id"] + else: self.log.debug("Creating new version ...") version["_id"] = ObjectId() - bulk_writes.append(InsertOne(version)) - else: - self.log.debug("Updating existing version ...") - # Check if instance have set `append` mode which cause that - # only replicated representations are set to archive - append_repres = instance.data.get("append", False) - # Update version data - version_id = existing_version['_id'] - bulk_writes.append(UpdateOne({ - '_id': version_id - }, { - '$set': version - })) + bulk_writes = [ReplaceOne( + filter={"_id": version["_id"]}, + replacement=version, + upsert=True + )] - # Instead of directly writing and querying we reproduce what - # the resulting version would look like so we can hold off making - # changes to the database to avoid the need for 'rollback' - version = copy.deepcopy(version) - version["_id"] = existing_version["_id"] - - # Find representations of existing version and archive them - repres = instance.data.get("representations", []) - new_repre_names_low = [_repre["name"].lower() for _repre in repres] - current_repres = io.find({ - "type": "representation", - "parent": version_id - }) - for repre in current_repres: - if append_repres: - # archive only duplicated representations - if repre["name"].lower() not in new_repre_names_low: - continue - # Representation must change type, - # `_id` must be stored to other key and replaced with new - # - that is because new representations should have same ID - repre_id = repre["_id"] - bulk_writes.append(DeleteOne({"_id": repre_id})) - - repre["orig_id"] = repre_id - repre["_id"] = ObjectId() - repre["type"] = "archived_representation" - bulk_writes.append(InsertOne(repre)) - - 
self.log.info("Registered version: v{0:03d}".format(version["name"])) + self.log.info("Prepared version: v{0:03d}".format(version["name"])) return version, bulk_writes def prepare_representation(self, repre, template_name, - archived_repres, + existing_repres_by_name, version, instance_stagingdir, instance): @@ -516,15 +511,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if repre.get("udim"): repre_context["udim"] = repre.get("udim") # store list - # Define representation id - repre_id = ObjectId() - # Use previous representation's id if there is a name match - repre_name_lower = repre["name"].lower() - for _archived_repres in archived_repres: - if repre_name_lower == _archived_repres["name"].lower(): - repre_id = _archived_repres["orig_id"] - break + existing = existing_repres_by_name.get(repre["name"].lower()) + if existing: + repre_id = existing["_id"] + else: + repre_id = ObjectId() # Backwards compatibility: # Store first transferred destination as published path data @@ -594,7 +586,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_name = profile["template_name"] return template_name - def register_subset(self, instance): + def prepare_subset(self, instance): asset = instance.data.get("assetEntity") subset_name = instance.data["subset"] self.log.debug("Subset: {}".format(subset_name)) @@ -631,7 +623,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): else: # Update existing subset data with new data and set in database. - # We also change the found subset in-place so we don't need to + # We also change the found subset in-place so we don't need to # re-query the subset afterwards subset["data"].update(data) bulk_writes.append(UpdateOne( @@ -641,7 +633,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): }} )) - self.log.info("Registered subset: {}".format(subset_name)) + self.log.info("Prepared subset: {}".format(subset_name)) return subset, bulk_writes def create_version_data(self, instance): From ba2c6e6f084e5829f32250735f13f045cabca800 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Mar 2022 15:43:57 +0200 Subject: [PATCH 057/785] Fix class type --- openpype/plugins/publish/collect_subset_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_subset_group.py b/openpype/plugins/publish/collect_subset_group.py index 5756563ed3..56cd7de94e 100644 --- a/openpype/plugins/publish/collect_subset_group.py +++ b/openpype/plugins/publish/collect_subset_group.py @@ -17,7 +17,7 @@ from openpype.lib import ( ) -class CollectSubsetGroup(pyblish.api.ContextPlugin): +class CollectSubsetGroup(pyblish.api.InstancePlugin): """Collect Subset Group for publish.""" # Run after CollectAnatomyInstanceData From e6665e579ee069b30a02b1034e53d48c85553761 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Mar 2022 20:32:46 +0200 Subject: [PATCH 058/785] Restructure code and more cleanup --- openpype/plugins/publish/integrate_new.py | 250 +++++++++++----------- 1 file changed, 123 insertions(+), 127 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 7a3ca2bdf7..6401806394 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -17,6 +17,21 @@ from openpype.lib.file_transaction import FileTransaction log = logging.getLogger(__name__) +def get_instance_families(instance): + """Get all families of the instance""" + # todo: move this to lib? 
+ family = instance.data.get("family") + families = [] + if family: + families.append(family) + + for _family in (instance.data.get("families") or []): + if _family not in families: + families.append(_family) + + return families + + def get_frame_padded(frame, padding): """Return frame number as string with `padding` amount of padded zeros""" return "{frame:0{padding}d}".format(padding=padding, frame=frame) @@ -119,7 +134,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def process(self, instance): # Exclude instances that also contain families from exclude families - families = set(self._get_instance_families(instance)) + families = set(get_instance_families(instance)) exclude = families & set(self.exclude_families) if exclude: self.log.debug("Instance not integrated due to exclude " @@ -140,22 +155,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # the try, except. file_transactions.finalize() - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" - # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] - - # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) - - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "hosts": anatomy_data["app"], - "task_types": task.get("type") - } - def register(self, instance, file_transactions): instance_stagingdir = instance.data.get("stagingDir") @@ -171,16 +170,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "@ {0}".format(instance_stagingdir) ) - # Ensure at least one file is set up for transfer in staging dir. + # Ensure at least one representation is set up for registering. repres = instance.data.get("representations") - assert repres, "Instance has no files to transfer" + assert repres, "Instance has representations data" assert isinstance(repres, (list, tuple)), ( - "Instance 'files' must be a list, got: {0} {1}".format( + "Instance 'repres' must be a list, got: {0} {1}".format( str(type(repres)), str(repres) ) ) - template_name = self._get_template_name(instance) + template_name = self.get_template_name(instance) subset, subset_writes = self.prepare_subset(instance) version, version_writes = self.prepare_version(instance, subset) @@ -300,6 +299,56 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.info("Registered {} representations" "".format(len(prepared_representations))) + def prepare_subset(self, instance): + asset = instance.data.get("assetEntity") + subset_name = instance.data["subset"] + self.log.debug("Subset: {}".format(subset_name)) + + # Get existing subset if it exists + subset = io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": subset_name + }) + + # Define subset data + data = { + "families": get_instance_families(instance) + } + + subset_group = instance.data.get("subsetGroup") + if subset_group: + data["subsetGroup"] = subset_group + + bulk_writes = [] + if subset is None: + # Create a new subset + self.log.info("Subset '%s' not found, creating ..." % subset_name) + subset = { + "_id": ObjectId(), + "schema": "openpype:subset-3.0", + "type": "subset", + "name": subset_name, + "data": data, + "parent": asset["_id"] + } + bulk_writes.append(InsertOne(subset)) + + else: + # Update existing subset data with new data and set in database. 
+ # We also change the found subset in-place so we don't need to + # re-query the subset afterwards + subset["data"].update(data) + bulk_writes.append(UpdateOne( + {"type": "subset", "_id": subset["_id"]}, + {"$set": { + "data": subset["data"] + }} + )) + + self.log.info("Prepared subset: {}".format(subset_name)) + return subset, bulk_writes + def prepare_version(self, instance, subset): version_number = instance.data["version"] @@ -559,91 +608,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "published_files": [transfer[1] for transfer in transfers] } - def _get_instance_families(self, instance): - """Get all families of the instance""" - # todo: move this to lib? - family = instance.data.get("family") - families = [] - if family: - families.append(family) - - for _family in (instance.data.get("families") or []): - if _family not in families: - families.append(_family) - - return families - - def _get_template_name(self, instance): - """Return anatomy template name to use for integration""" - - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.template_name_profiles, - filter_criteria, - logger=self.log) - template_name = self.default_template_name - if profile: - template_name = profile["template_name"] - return template_name - - def prepare_subset(self, instance): - asset = instance.data.get("assetEntity") - subset_name = instance.data["subset"] - self.log.debug("Subset: {}".format(subset_name)) - - # Get existing subset if it exists - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": subset_name - }) - - # Define subset data - data = { - "families": self._get_instance_families(instance) - } - - subset_group = instance.data.get("subsetGroup") - if subset_group: - data["subsetGroup"] = subset_group - - bulk_writes = [] - if subset is None: - # Create a new subset - self.log.info("Subset '%s' not found, creating ..." % subset_name) - subset = { - "_id": ObjectId(), - "schema": "openpype:subset-3.0", - "type": "subset", - "name": subset_name, - "data": data, - "parent": asset["_id"] - } - bulk_writes.append(InsertOne(subset)) - - else: - # Update existing subset data with new data and set in database. 
- # We also change the found subset in-place so we don't need to - # re-query the subset afterwards - subset["data"].update(data) - bulk_writes.append(UpdateOne( - {"type": "subset", "_id": subset["_id"]}, - {"$set": { - "data": subset["data"] - }} - )) - - self.log.info("Prepared subset: {}".format(subset_name)) - return subset, bulk_writes - def create_version_data(self, instance): - """Create the data collection for the version + """Create the data dictionary for the version Args: instance: the current instance being published Returns: - dict: the required information with instance.data as key + dict: the required information for version["data"] """ context = instance.context @@ -658,7 +630,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("Source: {}".format(source)) version_data = { - "families": self._get_instance_families(instance), + "families": get_instance_families(instance), "time": context.data["time"], "author": context.data["user"], "source": source, @@ -692,28 +664,52 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return version_data - def main_family_from_instance(self, instance): - """Returns main family of entered instance.""" - return self._get_instance_families(instance)[0] + def get_template_name(self, instance): + """Return anatomy template name to use for integration""" + + # Define publish template name from profiles + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(self.template_name_profiles, + filter_criteria, + logger=self.log) + template_name = self.default_template_name + if profile: + template_name = profile["template_name"] + return template_name + + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + # Anatomy data is pre-filled by Collectors + anatomy_data = instance.data["anatomyData"] + + # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + # Return filter criteria + return { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "hosts": anatomy_data["app"], + "task_types": task.get("type") + } def get_rootless_path(self, anatomy, path): - """ Returns, if possible, path without absolute portion from host - (eg. 'c:\' or '/opt/..') - This information is host dependent and shouldn't be captured. - Example: - 'c:/projects/MyProject1/Assets/publish...' > - '{root}/MyProject1/Assets...' + """Returns, if possible, path without absolute portion from root + (eg. 'c:\' or '/opt/..') + + This information is platform dependent and shouldn't be captured. + Example: + 'c:/projects/MyProject1/Assets/publish...' > + '{root}/MyProject1/Assets...' Args: - anatomy: anatomy part from instance - path: path (absolute) + anatomy: anatomy part from instance + path: path (absolute) Returns: - path: modified path if possible, or unmodified path - + warning logged + path: modified path if possible, or unmodified path + + warning logged """ - success, rootless_path = ( - anatomy.find_root_template_from_path(path) - ) + success, rootless_path = anatomy.find_root_template_from_path(path) if success: path = rootless_path else: @@ -731,9 +727,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Context info. 
Arguments: - instance: the current instance being published - integrated_file_sizes: dictionary of destination path (absolute) - and its file size + transfers (list): List of transferred files (source, destination) + sites (list): array of published locations + anatomy: anatomy part from instance Returns: output_resources: array of dictionaries to be added to 'files' key in representation @@ -749,14 +745,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): """ Prepare information for one file (asset or resource) Arguments: - path: destination url of published file (rootless) - size(optional): size of file in bytes - file_hash(optional): hash of file for synchronization validation - sites(optional): array of published locations, - [ {'name':'studio', 'created_dt':date} by default - keys expected ['studio', 'site1', 'gdrive1'] + path: destination url of published file + anatomy: anatomy part from instance + sites: array of published locations, + [ {'name':'studio', 'created_dt':date} by default + keys expected ['studio', 'site1', 'gdrive1'] + Returns: - rec: dictionary with filled info + dict: file info dictionary """ file_hash = openpype.api.source_hash(path) From 2777c36eb52e7390b15accc93c9b9a9a771ba21d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 28 Mar 2022 20:34:16 +0200 Subject: [PATCH 059/785] Rely on `instance.data["fps"] over `context.data["fps"]` if available --- openpype/plugins/publish/integrate_new.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 6401806394..00922b0ed3 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -636,9 +636,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source": source, "comment": context.data.get("comment"), "machine": context.data.get("machine"), - "fps": context.data.get( - "fps", instance.data.get("fps") - ) + "fps": instance.data.get("fps", context.data.get("fps")) } intent_value = context.data.get("intent") From add4958d4c9078b6ecad131f6e40beb66ecdd348 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 09:43:27 +0200 Subject: [PATCH 060/785] Fix message --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 00922b0ed3..f6aa720dbb 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -172,7 +172,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Ensure at least one representation is set up for registering. 
repres = instance.data.get("representations") - assert repres, "Instance has representations data" + assert repres, "Instance has no representations data" assert isinstance(repres, (list, tuple)), ( "Instance 'repres' must be a list, got: {0} {1}".format( str(type(repres)), str(repres) From 77b5c24370b61615b2380fdc464137d3eba13ab9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 11:44:30 +0200 Subject: [PATCH 061/785] Fix message --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index f6aa720dbb..020b1d2b9c 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -174,7 +174,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repres = instance.data.get("representations") assert repres, "Instance has no representations data" assert isinstance(repres, (list, tuple)), ( - "Instance 'repres' must be a list, got: {0} {1}".format( + "Instance 'representations' must be a list, got: {0} {1}".format( str(type(repres)), str(repres) ) ) From 127f19873f876d58a2c954c4a56c73ddd4d4d4af Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 11:58:52 +0200 Subject: [PATCH 062/785] Streamlining some code, optimize some database queries with projection --- openpype/plugins/publish/integrate_new.py | 36 ++++++++++++----------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 020b1d2b9c..d869a1b6be 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -187,10 +187,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Get existing representations (if any) existing_repres_by_name = { - repres["name"].lower(): repres for repres in io.find({ - "parent": version["_id"], - "type": "representation" - }) + repres["name"].lower(): repres for repres in io.find( + { + "parent": version["_id"], + "type": "representation" + }, + # Only care about id and name of existing representations + projection={"_id": True, "name": True} + ) } # Prepare all representations @@ -239,16 +243,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "{}".format(file_transactions.backups)) self.log.debug("Transferred files: " "{}".format(file_transactions.transferred)) - - # Finalize the representations now the published files are integrated - # Get 'files' info for representations and its attached resources self.log.debug("Retrieving Representation Site Sync information ...") + + # Get the accessible sites for Site Sync sites = SiteSync.compute_resource_sync_sites( system_settings=instance.context.data["system_settings"], project_settings=instance.context.data["project_settings"] ) - self.log.debug("final sites:: {}".format(sites)) + self.log.debug("Site Sync Sites: {}".format(sites)) + # Finalize the representations now the published files are integrated + # Get 'files' info for representations and its attached resources anatomy = instance.context.data["anatomy"] representation_writes = [] new_repre_names_low = set() @@ -365,7 +370,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): 'type': 'version', 'parent': subset["_id"], 'name': version_number - }) + }, projection={"_id": True}) if existing_version: self.log.debug("Updating existing version ...") @@ -576,7 +581,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # todo: `repre` 
is not the actual `representation` entity # we should simplify/clarify difference between data above # and the actual representation entity for the database - data = repre.get("data") or {} + data = repre.get("data", {}) data.update({'path': published_path, 'template': template}) representation = { "_id": repre_id, @@ -664,16 +669,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def get_template_name(self, instance): """Return anatomy template name to use for integration""" - # Define publish template name from profiles filter_criteria = self.get_profile_filter_criteria(instance) profile = filter_profiles(self.template_name_profiles, filter_criteria, logger=self.log) - template_name = self.default_template_name if profile: - template_name = profile["template_name"] - return template_name + return profile["template_name"] + else: + return self.default_template_name def get_profile_filter_criteria(self, instance): """Return filter criteria for `filter_profiles`""" @@ -752,13 +756,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: dict: file info dictionary """ - file_hash = openpype.api.source_hash(path) - return { "_id": ObjectId(), "path": self.get_rootless_path(anatomy, path), "size": os.path.getsize(path), - "hash": file_hash, + "hash": openpype.api.source_hash(path), "sites": sites } From 0c2c60d37b05411193acf8c60f6a2562463ba558 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 12:23:24 +0200 Subject: [PATCH 063/785] Unify usage of `clique.assemble` --- openpype/plugins/publish/integrate_new.py | 60 ++++++++++++++--------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index d869a1b6be..1ceb99e9fe 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -17,6 +17,41 @@ from openpype.lib.file_transaction import FileTransaction log = logging.getLogger(__name__) +def assemble(files): + """Convenience `clique.assemble` wrapper for files of a single collection. + + Unlike `clique.assemble` this wrapper does not allow more than a single + Collection nor any remainder files. Errors will be raised when not only + a single collection is assembled. + + Returns: + clique.Collection: A single sequence Collection + + Raises: + ValueError: Error is raised when files do not result in a single + collected Collection. + + """ + # todo: move this to lib? + # Get the sequence as a collection. The files must be of a single + # sequence and have no remainder outside of the collections. + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble(files, + minimum_items=1, + patterns=patterns) + if not collections: + raise ValueError("No collections found in files: " + "{}".format(files)) + if remainder: + raise ValueError("Files found not detected as part" + " of a sequence: {}".format(remainder)) + if len(collections) > 1: + raise ValueError("Files in sequence are not part of a" + " single sequence collection: " + "{}".format(collections)) + return collections[0] + + def get_instance_families(instance): """Get all families of the instance""" # todo: move this to lib? @@ -451,21 +486,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "Given file names contain full paths" ) - # Get the sequence as a collection. The files must be of a single - # sequence and have no remainder outside of the collections. 
- collections, remainder = clique.assemble(files, - minimum_items=1) - if not collections: - raise ValueError("No collections found in files: " - "{}".format(files)) - if remainder: - raise ValueError("Files found not detected as part" - " of a sequence: {}".format(remainder)) - if len(collections) > 1: - raise ValueError("Files in sequence are not part of a" - " single sequence collection: " - "{}".format(collections)) - src_collection = collections[0] + src_collection = assemble(files) # If the representation has `frameStart` set it renumbers the # frame indices of the published collection. It will start from @@ -512,14 +533,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_filled = anatomy_filled[template_name]["path"] repre_context = template_filled.used_values self.log.debug("Template filled: {}".format(str(template_filled))) - dst_collections, _remainder = clique.assemble( - [os.path.normpath(template_filled)], - minimum_items=1, - patterns=[clique.PATTERNS["frames"]] - ) - assert not _remainder, "This is a bug" - assert len(dst_collections) == 1, "This is a bug" - dst_collection = dst_collections[0] + dst_collection = assemble([os.path.normpath(template_filled)]) # Update the destination indexes and padding dst_collection.indexes.clear() From 44d6199a9e4ea7342fb2ef6bd583e0e373da2545 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 12:28:47 +0200 Subject: [PATCH 064/785] Organize single file code more like sequence file code --- openpype/plugins/publish/integrate_new.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 1ceb99e9fe..1592789390 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -551,21 +551,24 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): else: # Single file - template_data.pop("frame", None) fname = files assert not os.path.isabs(fname), ( "Given file name is a full path" ) - # Store used frame value to template data + + # Manage anatomy template data + template_data.pop("frame", None) if repre.get("udim"): template_data["udim"] = repre["udim"][0] - src = os.path.join(stagingdir, fname) + + # Construct destination filepath from template anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled[template_name]["path"] repre_context = template_filled.used_values dst = os.path.normpath(template_filled) # Single file transfer + src = os.path.join(stagingdir, fname) transfers = [(src, dst)] for key in self.db_representation_context_keys: From a2a77b8a2099b902e01816ec66a2f308e43004d1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 12:51:08 +0200 Subject: [PATCH 065/785] Cleanup `get_files_info` docstring --- openpype/plugins/publish/integrate_new.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 1592789390..0ee2a6286f 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -739,11 +739,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return path def get_files_info(self, transfers, sites, anatomy): - """ Prepare 'files' portion for attached resources and main asset. - Combining records from 'transfers' and 'hardlinks' parts from - instance. - All attached resources should be added, currently without - Context info. 
+ """Prepare 'files' info portion for representations. Arguments: transfers (list): List of transferred files (source, destination) From 6fe6841c996594871a535daf2c21914e5cc32575 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 13:18:04 +0200 Subject: [PATCH 066/785] Capture edge case where all "representations" are tagged for delete --- openpype/plugins/publish/integrate_new.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 0ee2a6286f..80e1909687 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -255,6 +255,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): prepared_representations.append(prepared) + if not prepared_representations: + # Even though we check `instance.data["representations"]` earlier + # this could still happen if all representations were tagged with + # "delete" and thus are skipped for integration + raise RuntimeError("No representations prepared to publish.") + # Each instance can also have pre-defined transfers not explicitly # part of a representation - like texture resources used by a # .ma representation. Those destination paths are pre-defined, etc. From a7a908d1348381ab0c4df9c29861d7c02be635cb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 13:20:51 +0200 Subject: [PATCH 067/785] Improve docstring --- openpype/plugins/publish/integrate_new.py | 39 +++++++++++------------ 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 80e1909687..8e666f3400 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -84,29 +84,26 @@ def bulk_write(writes): class IntegrateAssetNew(pyblish.api.InstancePlugin): - """Resolve any dependency issues + """Register publish in the database and transfer files to destinations. - This plug-in resolves any paths which, if not updated might break - the published file. + Steps: + 1) Register the subset and version + 2) Transfer the representation files to the destination + 3) Register the representation - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. - - Requirements for instance to be correctly integrated - - instance.data['representations'] - must be a list and each member - must be a dictionary with following data: - 'files': list of filenames for sequence, string for single file. - Only the filename is allowed, without the folder path. - 'stagingDir': "path/to/folder/with/files" - 'name': representation name (usually the same as extension) - 'ext': file extension - optional data - "frameStart" - "frameEnd" - 'fps' - "data": additional metadata for each representation. + Requires: + instance.data['representations'] - must be a list and each member + must be a dictionary with following data: + 'files': list of filenames for sequence, string for single file. + Only the filename is allowed, without the folder path. + 'stagingDir': "path/to/folder/with/files" + 'name': representation name (usually the same as extension) + 'ext': file extension + optional data + "frameStart" + "frameEnd" + 'fps' + "data": additional metadata for each representation. 
""" label = "Integrate Asset New" From 3ec9684239b7afc326cad7e184a7c6ed4e7a6058 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 11:28:13 +0200 Subject: [PATCH 068/785] Only add `frame` to context if used by the destination template --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 3543786949..99a915af73 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -158,7 +158,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["clip"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username", "frame" + "family", "hierarchy", "task", "username" ] default_template_name = "publish" From 6733df77f1f693b89078f216457621d129eb4f71 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 11:30:23 +0200 Subject: [PATCH 069/785] Remove double entry of "task" --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 99a915af73..da4dafb133 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -158,7 +158,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["clip"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "username" ] default_template_name = "publish" From c95c9f92b92f37eca20b1dbc82c3ef0620f8f753 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 11:34:52 +0200 Subject: [PATCH 070/785] Add comment --- openpype/plugins/publish/integrate_new.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index da4dafb133..a2943e2972 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -577,6 +577,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): transfers = [(src, dst)] for key in self.db_representation_context_keys: + # Also add these values to the context even if not used by the + # destination template value = template_data.get(key) if not value: continue From 65691bf5207cf57b679dd4b36b3abb6ae57e0be5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 11:36:32 +0200 Subject: [PATCH 071/785] Explain why we write subset+version first --- openpype/plugins/publish/integrate_new.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index a2943e2972..bab46803cb 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -270,7 +270,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) # Bulk write to the database - # todo: Can we move this even to after the file transfers? + # We write the subset and version to the database before the File + # Transaction to reduce the chances of another publish trying to + # publish to the same version number since that chance can greatly + # increase if the file transaction takes a long time. 
bulk_write(subset_writes + version_writes) self.log.info("Subset {subset[name]} and Version {version[name]} " "written to database..".format(subset=subset, From 0d83f3c76c880d088de718a416370e69529ad4a5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 11:38:43 +0200 Subject: [PATCH 072/785] Add to do for potential erroneous case --- openpype/plugins/publish/integrate_new.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index bab46803cb..84adccb633 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -602,6 +602,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Backwards compatibility: # Store first transferred destination as published path data # todo: can we remove this? + # todo: We shouldn't change data that makes its way back into + # instance.data[] until we know the publish actually succeeded + # otherwise `published_path` might not actually be valid? published_path = transfers[0][1] repre["published_path"] = published_path # Backwards compatibility From 89376a97e4ef85069a3afdfd5e3115b33bd27284 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 20:47:00 +0200 Subject: [PATCH 073/785] Also include file infos of resource files like textures into each representation - This should fix Site Sync for lookdev textures, etc. --- openpype/plugins/publish/integrate_new.py | 29 ++++++++++++++++------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 84adccb633..25ab7817c9 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -264,10 +264,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # part of a representation - like texture resources used by a # .ma representation. Those destination paths are pre-defined, etc. # todo: should we move or simplify this logic? 
+ resource_destinations = set() for src, dst in instance.data.get("transfers", []): file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) + resource_destinations.add(os.path.abspath(dst)) for src, dst in instance.data.get("hardlinks", []): file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) + resource_destinations.add(os.path.abspath(dst)) # Bulk write to the database # We write the subset and version to the database before the File @@ -295,18 +298,29 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) self.log.debug("Site Sync Sites: {}".format(sites)) + # Compute the resource file infos once (files belonging to the + # version instance instead of an individual representation) so + # we can re-use those file infos per representation + anatomy = instance.context.data["anatomy"] + resource_file_infos = self.prepare_file_info(resource_destinations, + sites=sites, + anatomy=anatomy) + # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources - anatomy = instance.context.data["anatomy"] representation_writes = [] new_repre_names_low = set() for prepared in prepared_representations: - transfers = prepared["transfers"] representation = prepared["representation"] + transfers = prepared["transfers"] + destinations = [dst for src, dst in transfers] representation["files"] = self.get_files_info( - transfers, sites, anatomy + destinations, sites=sites, anatomy=anatomy ) + # Add the version resource file infos to each representation + representation["files"] += resource_file_infos + # Set up representation for writing to the database. Since # we *might* be overwriting an existing entry if the version # already existed we'll use ReplaceOnce with `upsert=True` @@ -751,11 +765,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ).format(path)) return path - def get_files_info(self, transfers, sites, anatomy): + def get_files_info(self, destinations, sites, anatomy): """Prepare 'files' info portion for representations. 
Arguments: - transfers (list): List of transferred files (source, destination) + destinations (list): List of transferred file destinations sites (list): array of published locations anatomy: anatomy part from instance Returns: @@ -763,10 +777,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): in representation """ file_infos = [] - for _src, dest in transfers: - file_info = self.prepare_file_info(dest, anatomy, sites=sites) + for file_path in destinations: + file_info = self.prepare_file_info(file_path, anatomy, sites=sites) file_infos.append(file_info) - return file_infos def prepare_file_info(self, path, anatomy, sites): From e6209555b01a0330186bc9176c8331a130325186 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 20:50:13 +0200 Subject: [PATCH 074/785] Match behavior more with what integrator did before refactor --- openpype/plugins/publish/collect_anatomy_context_data.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 346caf6b83..c3fabba2ce 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -93,9 +93,9 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): intent = context.data.get("intent") if intent and isinstance(intent, dict): - intent_value = intent.get("value") - if intent_value: - context_data["intent"] = intent_value + intent = intent.get("value") + if intent: + context_data["intent"] = intent self.log.info("Global anatomy Data collected") self.log.debug(json.dumps(context_data, indent=4)) From 52fd21d85494dacd0071a3b08d79dbdd04789b30 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 20:51:56 +0200 Subject: [PATCH 075/785] Add todo/question regarding `intent` --- openpype/plugins/publish/collect_anatomy_context_data.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index c3fabba2ce..3f7e65ecd3 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -91,6 +91,8 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): } }) + # todo: some code actually expects the dict itself and others doesn't + # question: what should it be? intent = context.data.get("intent") if intent and isinstance(intent, dict): intent = intent.get("value") From 4c78976d3d834a5cb1fd0bce44f465cbf3ac6375 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 20:55:40 +0200 Subject: [PATCH 076/785] Add todo --- openpype/plugins/publish/integrate_new.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 25ab7817c9..e0c0632548 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -688,6 +688,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "fps": instance.data.get("fps", context.data.get("fps")) } + # todo: preferably we wouldn't need this "if dict" etc. logic and + # instead be able to rely what the input value is if it's set. 
intent_value = context.data.get("intent") if intent_value and isinstance(intent_value, dict): intent_value = intent_value.get("value") From 3e095bc7554a24ef13282ccfd87e0327eb3b8745 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 20:57:38 +0200 Subject: [PATCH 077/785] Use template name for frame padding anatomy template --- openpype/plugins/publish/integrate_new.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e0c0632548..0f3b11a025 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -520,8 +520,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if repre.get("frameStart") is not None: index_frame_start = int(repre.get("frameStart")) - # TODO use frame padding from right template group - render_template = anatomy.templates["render"] + render_template = anatomy.templates[template_name] frame_start_padding = int( render_template.get( "frame_padding", From b12b1c80f2facbe343333ba3d70dcbe463383538 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 21:00:10 +0200 Subject: [PATCH 078/785] Never shift udim sequences --- openpype/plugins/publish/integrate_new.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 0f3b11a025..fd0d57c646 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -501,6 +501,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): anatomy = instance.context.data['anatomy'] template = os.path.normpath(anatomy.templates[template_name]["path"]) + is_udim = bool(repre.get("udim")) is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: # Collection of files (sequence) @@ -517,7 +518,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # frame indices from the source collection. destination_indexes = list(src_collection.indexes) destination_padding = len(get_first_frame_padded(src_collection)) - if repre.get("frameStart") is not None: + if repre.get("frameStart") is not None and not is_udim: index_frame_start = int(repre.get("frameStart")) render_template = anatomy.templates[template_name] @@ -543,7 +544,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # from the source indexes, etc. 
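            # For example (numbers are illustrative): with destination_indexes
            # starting at 1001 and destination_padding 4,
            # get_frame_padded(1001, padding=4) returns "1001"; with padding 6
            # it would return "001001".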
first_index_padded = get_frame_padded(frame=destination_indexes[0], padding=destination_padding) - if repre.get("udim"): + if is_udim: # UDIM representations handle ranges in a different manner template_data["udim"] = first_index_padded else: @@ -579,7 +580,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Manage anatomy template data template_data.pop("frame", None) - if repre.get("udim"): + if is_udim: template_data["udim"] = repre["udim"][0] # Construct destination filepath from template From f7d35c4fed0885c6656da03eb852706c6bf20117 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 21:01:09 +0200 Subject: [PATCH 079/785] add todo/question --- openpype/plugins/publish/integrate_new.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index fd0d57c646..52c7686473 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -522,6 +522,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): index_frame_start = int(repre.get("frameStart")) render_template = anatomy.templates[template_name] + # todo: should we ALWAYS manage the frame padding even when not + # having `frameStart` set? frame_start_padding = int( render_template.get( "frame_padding", From 70bfdd09b40936efc45efa6bbd1ea029447058f2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 2 Apr 2022 21:07:02 +0200 Subject: [PATCH 080/785] Remove old "dependencies" data --- openpype/plugins/publish/integrate_new.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 52c7686473..37c68ffa6d 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -636,7 +636,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "parent": version["_id"], "name": repre['name'], "data": data, - "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context for performance reasons. 
"context": repre_context From 45745cc514236d64cc7f2feddbff9e6217b720fa Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 3 Apr 2022 20:37:28 +0200 Subject: [PATCH 081/785] Improve clarity of comment --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 37c68ffa6d..cb469251e6 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -604,7 +604,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_context[key] = template_data[key] # Explicitly store the full list even though template data might - # have a different value + # have a different value because it uses just a single udim tile if repre.get("udim"): repre_context["udim"] = repre.get("udim") # store list From fe72197a9feb413c8f6c5f9e02339ed891fdda07 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 3 Apr 2022 20:40:25 +0200 Subject: [PATCH 082/785] Add comment --- openpype/plugins/publish/integrate_new.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index cb469251e6..f1cceb9ca7 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -156,11 +156,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "usdOverride" ] exclude_families = ["clip"] + default_template_name = "publish" + + # Representation context keys that should always be written to + # the database even if not used by the destination template db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", "family", "hierarchy", "username" ] - default_template_name = "publish" # Attributes set by settings template_name_profiles = None From c3c8281e0134222677b32f91ec644322dd996a74 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 3 Apr 2022 20:41:34 +0200 Subject: [PATCH 083/785] tweak comment --- openpype/plugins/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index f1cceb9ca7..238ae82bba 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -183,7 +183,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.register(instance, file_transactions) except Exception: # clean destination - # todo: rollback any registered entities? (or how safe are we?) 
+ # todo: preferably we'd also rollback *any* changes to the database file_transactions.rollback() self.log.critical("Error when registering", exc_info=True) six.reraise(*sys.exc_info()) From 1177ee2a25403c143aa0d2639102ee1360ac77d2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 11 Apr 2022 15:21:59 +0300 Subject: [PATCH 084/785] Refactor Maya Create Render Schema --- .../schemas/schema_maya_create.json | 39 +- .../schemas/schema_maya_create_render.json | 417 ++++++++++++++++++ 2 files changed, 420 insertions(+), 36 deletions(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 6dc10ed2a5..4e92875677 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -29,42 +29,9 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CreateRender", - "label": "Create Render", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "list", - "key": "defaults", - "label": "Default Subsets", - "object_type": "text" - }, - { - "key": "aov_separator", - "label": "AOV Separator character", - "type": "enum", - "multiselection": false, - "default": "underscore", - "enum_items": [ - {"dash": "- (dash)"}, - {"underscore": "_ (underscore)"}, - {"dot": ". (dot)"} - ] - }, - { - "type": "text", - "key": "default_render_image_folder", - "label": "Default render image folder" - } - ] + { + "type": "schema", + "name": "schema_maya_create_render" }, { "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json new file mode 100644 index 0000000000..f4a724cd5c --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json @@ -0,0 +1,417 @@ +{ + "type": "dict", + "collapsible": true, + "key": "CreateRender", + "label": "Create Render", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "text", + "key": "default_render_image_folder", + "label": "Default render image folder" + }, + { + "key": "aov_separator", + "label": "AOV Separator character", + "type": "enum", + "multiselection": false, + "default": "underscore", + "enum_items": [ + {"dash": "- (dash)"}, + {"underscore": "_ (underscore)"}, + {"dot": ". 
(dot)"} + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "arnold_renderer", + "label": "Arnold Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"jpeg": "jpeg"}, + {"png": "png"}, + {"deepexr": "deep exr"}, + {"tif": "tif"}, + {"exr": "exr"}, + {"maya": "maya"}, + {"mtoa_shaders": "mtoa_shaders"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "tiled", + "label": "Tiled (tif, exr)", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"ID": "ID"}, + {"N": "N"}, + {"P": "P"}, + {"Pref": "Pref"}, + {"RGBA": "RGBA"}, + {"Z": "Z"}, + {"albedo": "albedo"}, + {"background": "background"}, + {"coat": "coat"}, + {"coat_albedo": "coat_albedo"}, + {"coat_direct": "coat_direct"}, + {"coat_indirect": "coat_indirect"}, + {"cputime": "cputime"}, + {"crypto_asset": "crypto_asset"}, + {"crypto_material": "cypto_material"}, + {"crypto_object": "crypto_object"}, + {"diffuse": "diffuse"}, + {"diffuse_albedo": "diffuse_albedo"}, + {"diffuse_direct": "diffuse_direct"}, + {"diffuse_indirect": "diffuse_indirect"}, + {"direct": "direct"}, + {"emission": "emission"}, + {"highlight": "highlight"}, + {"indirect": "indirect"}, + {"motionvector": "motionvector"}, + {"opacity": "opacity"}, + {"raycount": "raycount"}, + {"rim_light": "rim_light"}, + {"shadow": "shadow"}, + {"shadow_diff": "shadow_diff"}, + {"shadow_mask": "shadow_mask"}, + {"shadow_matte": "shadow_matte"}, + {"sheen": "sheen"}, + {"sheen_albedo": "sheen_albedo"}, + {"sheen_direct": "sheen_direct"}, + {"sheen_indirect": "sheen_indirect"}, + {"specular": "specular"}, + {"specular_albedo": "specular_albedo"}, + {"specular_direct": "specular_direct"}, + {"specular_indirect": "specular_indirect"}, + {"sss": "sss"}, + {"sss_albedo": "sss_albedo"}, + {"sss_direct": "sss_direct"}, + {"sss_indirect": "sss_indirect"}, + {"transmission": "transmission"}, + {"transmission_albedo": "transmission_albedo"}, + {"transmission_direct": "transmission_direct"}, + {"transmission_indirect": "transmission_indirect"}, + {"volume": "volume"}, + {"volume_Z": "volume_Z"}, + {"volume_albedo": "volume_albedo"}, + {"volume_direct": "volume_direct"}, + {"volume_indirect": "volume_indirect"}, + {"volume_opacity": "volume_opacity"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like AASamples" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "vray_renderer", + "label": "V-Ray Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "engine", + "label": "Production Engine", + "type": "enum", + "multiselection": false, + "defaults": "1", + "enum_items": [ + {"1": "V-Ray"}, + {"2": "V-Ray GPU"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"png": "png"}, + {"jpg": "jpg"}, + {"vrimg": "vrimg"}, + {"hdr": "hdr"}, 
+ {"exr": "exr"}, + {"exr (multichannel)": "exr (multichannel)"}, + {"exr (deep)": "exr (deep)"}, + {"tga": "tga"}, + {"bmp": "bmp"}, + {"sgi": "sgi"} + ] + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"atmosphereChannel": "atmosphere"}, + {"backgroundChannel": "background"}, + {"bumpNormalsChannel": "bumpnormals"}, + {"causticsChannel": "caustics"}, + {"coatFilterChannel": "coat_filter"}, + {"coatGlossinessChannel": "coatGloss"}, + {"coatReflectionChannel": "coat_reflection"}, + {"vrayCoatChannel": "coat_specular"}, + {"CoverageChannel": "coverage"}, + {"cryptomatteChannel": "cryptomatte"}, + {"customColor": "custom_color"}, + {"drBucketChannel": "DR"}, + {"denoiserChannel": "denoiser"}, + {"diffuseChannel": "diffuse"}, + {"ExtraTexElement": "extraTex"}, + {"giChannel": "GI"}, + {"LightMixElement": "None"}, + {"lightingChannel": "lighting"}, + {"LightingAnalysisChannel": "LightingAnalysis"}, + {"materialIDChannel": "materialID"}, + {"MaterialSelectElement": "materialSelect"}, + {"matteShadowChannel": "matteShadow"}, + {"MultiMatteElement": "multimatte"}, + {"multimatteIDChannel": "multimatteID"}, + {"normalsChannel": "normals"}, + {"nodeIDChannel": "objectId"}, + {"objectSelectChannel": "objectSelect"}, + {"rawCoatFilterChannel": "raw_coat_filter"}, + {"rawCoatReflectionChannel": "raw_coat_reflection"}, + {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, + {"rawGiChannel": "rawGI"}, + {"rawLightChannel": "rawLight"}, + {"rawReflectionChannel": "rawReflection"}, + {"rawReflectionFilterChannel": "rawReflectionFilter"}, + {"rawRefractionChannel": "rawRefraction"}, + {"rawRefractionFilterChannel": "rawRefractionFilter"}, + {"rawShadowChannel": "rawShadow"}, + {"rawSheenFilterChannel": "raw_sheen_filter"}, + {"rawSheenReflectionChannel": "raw_sheen_reflection"}, + {"rawTotalLightChannel": "rawTotalLight"}, + {"reflectIORChannel": "reflIOR"}, + {"reflectChannel": "reflect"}, + {"reflectionFilterChannel": "reflectionFilter"}, + {"reflectGlossinessChannel": "reflGloss"}, + {"refractChannel": "refract"}, + {"refractionFilterChannel": "refractionFilter"}, + {"refractGlossinessChannel": "refrGloss"}, + {"renderIDChannel": "renderId"}, + {"FastSSS2Channel": "SSS"}, + {"sampleRateChannel": "sampleRate"}, + {"samplerInfo": "samplerInfo"}, + {"selfIllumChannel": "selfIllum"}, + {"shadowChannel": "shadow"}, + {"sheenFilterChannel": "sheen_filter"}, + {"sheenGlossinessChannel": "sheenGloss"}, + {"sheenReflectionChannel": "sheen_reflection"}, + {"vraySheenChannel": "sheen_specular"}, + {"specularChannel": "specular"}, + {"Toon": "Toon"}, + {"toonLightingChannel": "toonLighting"}, + {"toonSpecularChannel": "toonSpecular"}, + {"totalLightChannel": "totalLight"}, + {"unclampedColorChannel": "unclampedColor"}, + {"VRScansPaintMaskChannel": "VRScansPaintMask"}, + {"VRScansZoneMaskChannel": "VRScansZoneMask"}, + {"velocityChannel": "velocity"}, + {"zdepthChannel": "zDepth"}, + {"LightSelectElement": "lightselect"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like aaFilterSize" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "redshift_renderer", + "label": "Redshift Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + 
"label": "Image prefix template", + "type": "text" + }, + { + "key": "primary_gi_engine", + "label": "Primary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "secondary_gi_engine", + "label": "Secondary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"iff": "Maya IFF"}, + {"exr": "OpenEXR"}, + {"tif": "TIFF"}, + {"png": "PNG"}, + {"tga": "Targa"}, + {"jpg": "JPEG"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "force_combine", + "label": "Force combine beauty and AOVs", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< none >"}, + {"AO": "Ambient Occlusion"}, + {"Background": "Background"}, + {"Beauty": "Beauty"}, + {"BumpNormals": "Bump Normals"}, + {"Caustics": "Caustics"}, + {"CausticsRaw": "Caustics Raw"}, + {"Cryptomatte": "Cryptomatte"}, + {"Custom": "Custom"}, + {"Z": "Depth"}, + {"DiffuseFilter": "Diffuse Filter"}, + {"DiffuseLighting": "Diffuse Lighting"}, + {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, + {"Emission": "Emission"}, + {"GI": "Global Illumination"}, + {"GIRaw": "Global Illumination Raw"}, + {"Matte": "Matte"}, + {"MotionVectors": "Ambient Occlusion"}, + {"N": "Normals"}, + {"ID": "ObjectID"}, + {"ObjectBumpNormal": "Object-Space Bump Normals"}, + {"ObjectPosition": "Object-Space Positions"}, + {"PuzzleMatte": "Puzzle Matte"}, + {"Reflections": "Reflections"}, + {"ReflectionsFilter": "Reflections Filter"}, + {"ReflectionsRaw": "Reflections Raw"}, + {"Refractions": "Refractions"}, + {"RefractionsFilter": "Refractions Filter"}, + {"RefractionsRaw": "Refractions Filter"}, + {"Shadows": "Shadows"}, + {"SpecularLighting": "Specular Lighting"}, + {"SSS": "Sub Surface Scatter"}, + {"SSSRaw": "Sub Surface Scatter Raw"}, + {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, + {"TotalTransLightingRaw": "Total Translucency Filter"}, + {"TransTint": "Translucency Filter"}, + {"TransGIRaw": "Translucency Lighting Raw"}, + {"VolumeFogEmission": "Volume Fog Emission"}, + {"VolumeFogTint": "Volume Fog Tint"}, + {"VolumeLighting": "Volume Lighting"}, + {"P": "World Position"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like reflectionMaxTraceDepth" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + } + ] +} \ No newline at end of file From 8c4d44fd46ab182ad442e47f3cfe8f7ba9a76f47 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 11 Apr 2022 15:22:17 +0300 Subject: [PATCH 085/785] add renderer settings --- .../defaults/project_settings/maya.json | 33 ++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 4cdfe1ca5d..c0b85eb0eb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ 
b/openpype/settings/defaults/project_settings/maya.json @@ -43,8 +43,39 @@ "defaults": [ "Main" ], + "default_render_image_folder": "renders", "aov_separator": "underscore", - "default_render_image_folder": "renders" + "arnold_renderer": { + "image_prefix": "maya///{aov_separator}", + "image_format": "exr", + "multilayer_exr": false, + "tiled": true, + "aov_list": [ + "empty" + ], + "additional_options": {} + }, + "vray_renderer": { + "image_prefix": "maya///", + "engine": "1", + "image_format": "exr", + "aov_list": [ + "empty" + ], + "additional_options": {} + }, + "redshift_renderer": { + "image_prefix": "'maya///{aov_separator}", + "primary_gi_engine": "0", + "secondary_gi_engine": "0", + "image_format": "exr", + "multilayer_exr": false, + "force_combine": false, + "aov_list": [ + "empty" + ], + "additional_options": {} + } }, "CreateUnrealStaticMesh": { "enabled": true, From 2e2deb349d082096d056f878a5cf629c2f95e12c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 14 Apr 2022 13:06:14 +0200 Subject: [PATCH 086/785] Match changes that were made to original IntegrateAsset Changes of: - https://github.com/pypeclub/OpenPype/commit/312d0309ab92de834629c58587f1a758d1d1e90c - https://github.com/pypeclub/OpenPype/commit/507f3615ab8f42f5664afcac01d339e0517afdf5 - https://github.com/pypeclub/OpenPype/commit/29dca65202d45a79e66c619b95d3408e227a9c05 --- openpype/plugins/publish/integrate_new.py | 61 ++++++++++++++++++----- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 9e8dfefc9e..768c413bf9 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -4,6 +4,7 @@ import sys import copy import clique import six +from collections import deque, defaultdict from bson.objectid import ObjectId from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne @@ -871,18 +872,18 @@ class SiteSync(object): attached_sites[remote_site] = create_metadata(remote_site, created=False) + # add alternative sites + cls._add_alternative_sites(system_sync_server_presets, attached_sites) + # add skeleton for sites where it should be always synced to always_accessible_sites = ( sync_project_presets["config"].get("always_accessible_on", []) ) - for site in always_accessible_sites: + for site in set(always_accessible_sites): site = site.strip() if site not in attached_sites: attached_sites[site] = create_metadata(site, created=False) - # add alternative sites - cls._add_alternative_sites(system_sync_server_presets, attached_sites) - return list(attached_sites.values()) @staticmethod @@ -904,8 +905,9 @@ class SiteSync(object): return local_site, remote_site - @staticmethod - def _add_alternative_sites(system_sync_server_presets, + @classmethod + def _add_alternative_sites(cls, + system_sync_server_presets, attached_sites): """Loop through all configured sites and add alternatives. 
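        # A minimal sketch of the pairing this patch introduces, using a
        # hypothetical three-site setup ("studio" plus assumed site names
        # "sftp" and "gdrive") where "sftp" lists "studio" as an
        # alternative site and "gdrive" lists "sftp":
        #     conf_sites = {
        #         "sftp": {"alternative_sites": ["studio"]},
        #         "gdrive": {"alternative_sites": ["sftp"]},
        #     }
        #     _get_alt_site_pairs(conf_sites)
        #     # -> "sftp" pairs with {"studio", "gdrive"},
        #     #    "studio" pairs with {"sftp", "gdrive"},
        #     #    "gdrive" pairs with {"sftp", "studio"}
        # The relation becomes symmetric and transitive, so an alternative
        # site is registered with the same "created" state as the
        # accessible site it pairs with.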
@@ -916,18 +918,14 @@ class SiteSync(object): See SyncServerModule.handle_alternate_site """ conf_sites = system_sync_server_presets.get("sites", {}) + alt_site_pairs = cls._get_alt_site_pairs(conf_sites) - for site_name, site_info in conf_sites.items(): + for site_name, alt_sites in alt_site_pairs.items(): # Skip if already defined if site_name in attached_sites: continue - # Get alternate sites (stripped names) for this site name - alt_sites = site_info.get("alternative_sites", []) - alt_sites = [site.strip() for site in alt_sites] - alt_sites = set(alt_sites) - # If no alternative sites we don't need to add if not alt_sites: continue @@ -944,3 +942,42 @@ class SiteSync(object): # Note: We change mutable `attached_site` dict in-place attached_sites[site_name] = alt_site_meta + + @staticmethod + def _get_alt_site_pairs(conf_sites): + """Returns dict of site and its alternative sites. + If `site` has alternative site, it means that alt_site has + 'site' as + alternative site + Args: + conf_sites (dict) + Returns: + (dict): {'site': [alternative sites]...} + """ + alt_site_pairs = defaultdict(list) + for site_name, site_info in conf_sites.items(): + alt_sites = set(site_info.get("alternative_sites", [])) + alt_site_pairs[site_name].extend(alt_sites) + + for alt_site in alt_sites: + alt_site_pairs[alt_site].append(site_name) + + for site_name, alt_sites in alt_site_pairs.items(): + sites_queue = deque(alt_sites) + while sites_queue: + alt_site = sites_queue.popleft() + + # safety against wrong config + # {"SFTP": {"alternative_site": "SFTP"} + if alt_site == site_name or alt_site not in alt_site_pairs: + continue + + for alt_alt_site in alt_site_pairs[alt_site]: + if ( + alt_alt_site != site_name + and alt_alt_site not in alt_sites + ): + alt_sites.append(alt_alt_site) + sites_queue.append(alt_alt_site) + + return alt_site_pairs From 0fdd4f1aecd3b5fa09496d4aa48ee605a003e61d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 14 Apr 2022 13:07:33 +0200 Subject: [PATCH 087/785] Fix indentation --- openpype/plugins/publish/integrate_new.py | 66 +++++++++++------------ 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 768c413bf9..4eccce4e81 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -943,41 +943,41 @@ class SiteSync(object): # Note: We change mutable `attached_site` dict in-place attached_sites[site_name] = alt_site_meta - @staticmethod - def _get_alt_site_pairs(conf_sites): - """Returns dict of site and its alternative sites. - If `site` has alternative site, it means that alt_site has - 'site' as - alternative site - Args: - conf_sites (dict) - Returns: - (dict): {'site': [alternative sites]...} - """ - alt_site_pairs = defaultdict(list) - for site_name, site_info in conf_sites.items(): - alt_sites = set(site_info.get("alternative_sites", [])) - alt_site_pairs[site_name].extend(alt_sites) + @staticmethod + def _get_alt_site_pairs(conf_sites): + """Returns dict of site and its alternative sites. 
+ If `site` has alternative site, it means that alt_site has + 'site' as + alternative site + Args: + conf_sites (dict) + Returns: + (dict): {'site': [alternative sites]...} + """ + alt_site_pairs = defaultdict(list) + for site_name, site_info in conf_sites.items(): + alt_sites = set(site_info.get("alternative_sites", [])) + alt_site_pairs[site_name].extend(alt_sites) - for alt_site in alt_sites: - alt_site_pairs[alt_site].append(site_name) + for alt_site in alt_sites: + alt_site_pairs[alt_site].append(site_name) - for site_name, alt_sites in alt_site_pairs.items(): - sites_queue = deque(alt_sites) - while sites_queue: - alt_site = sites_queue.popleft() + for site_name, alt_sites in alt_site_pairs.items(): + sites_queue = deque(alt_sites) + while sites_queue: + alt_site = sites_queue.popleft() - # safety against wrong config - # {"SFTP": {"alternative_site": "SFTP"} - if alt_site == site_name or alt_site not in alt_site_pairs: - continue + # safety against wrong config + # {"SFTP": {"alternative_site": "SFTP"} + if alt_site == site_name or alt_site not in alt_site_pairs: + continue - for alt_alt_site in alt_site_pairs[alt_site]: - if ( - alt_alt_site != site_name - and alt_alt_site not in alt_sites - ): - alt_sites.append(alt_alt_site) - sites_queue.append(alt_alt_site) + for alt_alt_site in alt_site_pairs[alt_site]: + if ( + alt_alt_site != site_name + and alt_alt_site not in alt_sites + ): + alt_sites.append(alt_alt_site) + sites_queue.append(alt_alt_site) - return alt_site_pairs + return alt_site_pairs From 1a03bbe48a37cc62918152985488a0bd99d43473 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 14 Apr 2022 13:11:57 +0200 Subject: [PATCH 088/785] Store alt sites in a `set` --- openpype/plugins/publish/integrate_new.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 4eccce4e81..2795b59482 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -954,13 +954,13 @@ class SiteSync(object): Returns: (dict): {'site': [alternative sites]...} """ - alt_site_pairs = defaultdict(list) + alt_site_pairs = defaultdict(set) for site_name, site_info in conf_sites.items(): alt_sites = set(site_info.get("alternative_sites", [])) - alt_site_pairs[site_name].extend(alt_sites) + alt_site_pairs[site_name].update(alt_sites) for alt_site in alt_sites: - alt_site_pairs[alt_site].append(site_name) + alt_site_pairs[alt_site].add(site_name) for site_name, alt_sites in alt_site_pairs.items(): sites_queue = deque(alt_sites) @@ -977,7 +977,7 @@ class SiteSync(object): alt_alt_site != site_name and alt_alt_site not in alt_sites ): - alt_sites.append(alt_alt_site) + alt_sites.add(alt_alt_site) sites_queue.append(alt_alt_site) return alt_site_pairs From b64b0a66b06ea6fc8dfaeedc4a7c3bef1f53a609 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 19 Apr 2022 20:28:56 +0300 Subject: [PATCH 089/785] add function to grab Arnold settings --- .../maya/plugins/create/create_render.py | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4f0a394f85..84ac8f36ec 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -431,6 +431,20 @@ class CreateRender(plugin.Creator): kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) return 
requests.get(*args, **kwargs) + def _set_Arnold_settings(self): + """Sets settings for Arnold.""" + + img_ext = self.arnold_renderer.get("image_format") + self._set_global_output_settings() + # Resolution + resWidth = self.attributes.get("resolutionWidth") + resHeight = self.attributes.get("resolutionHeight") + + cmds.setAttr("defaultArnoldDriver.ai_translator", + img_ext, type="string") + cmds.setAttr("defaultResolution.width", resWidth) + cmds.setAttr("defaultResolution.height", resHeight) + def _set_default_renderer_settings(self, renderer): """Set basic settings based on renderer. @@ -448,18 +462,7 @@ class CreateRender(plugin.Creator): if renderer == "arnold": # set format to exr - - cmds.setAttr( - "defaultArnoldDriver.ai_translator", "exr", type="string") - self._set_global_output_settings() - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) - + self._set_Arnold_settings() if renderer == "vray": self._set_vray_settings(asset) if renderer == "redshift": From 5d56323050e48951930c0439b88d646b94d98872 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 19 Apr 2022 22:50:34 +0300 Subject: [PATCH 090/785] add redshift settings function --- .../hosts/maya/plugins/create/create_render.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 84ac8f36ec..f0317ccb9e 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -466,6 +466,7 @@ class CreateRender(plugin.Creator): if renderer == "vray": self._set_vray_settings(asset) if renderer == "redshift": + self._set_redshift_settings() cmds.setAttr("redshiftOptions.imageFormat", 1) # resolution @@ -478,6 +479,19 @@ class CreateRender(plugin.Creator): self._set_global_output_settings() + def _set_redshift_settings(self): + """Sets settings for Arnold.""" + + img_ext = self.redshift_renderer.get("image_format") + self._set_global_output_settings() + # Resolution + resWidth = self.attributes.get("resolutionWidth") + resHeight = self.attributes.get("resolutionHeight") + + cmds.setAttr("redshiftOptions.imageFormat", img_ext) + cmds.setAttr("defaultResolution.width", resWidth) + cmds.setAttr("defaultResolution.height", resHeight) + def _set_vray_settings(self, asset): # type: (dict) -> None """Sets important settings for Vray.""" From b62fa7451b4a69460e7a2e26c4a3d0e25ca23353 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 19 Apr 2022 23:04:08 +0300 Subject: [PATCH 091/785] replace redshift settings setters with method --- openpype/hosts/maya/plugins/create/create_render.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index f0317ccb9e..757cc16fda 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -467,17 +467,6 @@ class CreateRender(plugin.Creator): self._set_vray_settings(asset) if renderer == "redshift": self._set_redshift_settings() - cmds.setAttr("redshiftOptions.imageFormat", 1) - - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) - - self._set_global_output_settings() def 
_set_redshift_settings(self): """Sets settings for Arnold.""" From 7ea7a0f5f5827ab20160a8ad635c5613179a4352 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 20 Apr 2022 23:40:42 +0300 Subject: [PATCH 092/785] remove extra code in render creator --- openpype/hosts/maya/api/render_settings.py | 24 ++++++++++++------- .../maya/plugins/create/create_render.py | 4 +--- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 48bf7fa56c..2614ca23e2 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -54,6 +54,20 @@ class RenderSettings(object): render_settings = RenderSettings(project_settings) render_settings.set_default_renderer_settings(renderer) + def _set_Arnold_settings(self): + """Sets settings for Arnold.""" + + img_ext = self.arnold_renderer.get("image_format") + self._set_global_output_settings() + # Resolution + resWidth = self.attributes.get("resolutionWidth") + resHeight = self.attributes.get("resolutionHeight") + + cmds.setAttr("defaultArnoldDriver.ai_translator", + img_ext, type="string") + cmds.setAttr("defaultResolution.width", resWidth) + cmds.setAttr("defaultResolution.height", resHeight) + def set_default_renderer_settings(self, renderer): """Set basic settings based on renderer. @@ -83,14 +97,8 @@ class RenderSettings(object): height = asset["data"].get("resolutionHeight") if renderer == "arnold": - # set format to exr - cmds.setAttr( - "defaultArnoldDriver.ai_translator", "exr", type="string") - self._set_global_output_settings() - - # resolution - cmds.setAttr("defaultResolution.width", width) - cmds.setAttr("defaultResolution.height", height) + # set renderer settings for Arnold from project settings + self._set_Arnold_settings() if renderer == "vray": self._set_vray_settings(aov_separator, width, height) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 5431cfea57..0ef9665fdf 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -421,9 +421,7 @@ class CreateRender(plugin.Creator): asset = get_asset() - if renderer == "arnold": - # set format to exr - self._set_Arnold_settings() + if renderer == "vray": self._set_vray_settings(asset) if renderer == "redshift": From 854ec3b762f61d71be5e9358fefb67945057f1dd Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 20 Apr 2022 23:44:28 +0300 Subject: [PATCH 093/785] add missing get_asset() --- openpype/hosts/maya/plugins/create/create_render.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 0ef9665fdf..313fb68fa7 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -14,7 +14,8 @@ from maya.app.renderSetup.model import renderSetup from avalon.api import Session from openpype.api import ( get_system_settings, - get_project_settings + get_project_settings, + get_asset, ) from openpype.hosts.maya.api import ( lib, @@ -421,7 +422,6 @@ class CreateRender(plugin.Creator): asset = get_asset() - if renderer == "vray": self._set_vray_settings(asset) if renderer == "redshift": From 3f488594bea9ecd1feda46987bea09195835a40c Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 00:50:41 +0300 Subject: [PATCH 094/785] remove 
unused function --- .../hosts/maya/plugins/create/create_render.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 313fb68fa7..e95e39e975 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -393,20 +393,6 @@ class CreateRender(plugin.Creator): kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) return requests.get(*args, **kwargs) - def _set_Arnold_settings(self): - """Sets settings for Arnold.""" - - img_ext = self.arnold_renderer.get("image_format") - self._set_global_output_settings() - # Resolution - resWidth = self.attributes.get("resolutionWidth") - resHeight = self.attributes.get("resolutionHeight") - - cmds.setAttr("defaultArnoldDriver.ai_translator", - img_ext, type="string") - cmds.setAttr("defaultResolution.width", resWidth) - cmds.setAttr("defaultResolution.height", resHeight) - def _set_default_renderer_settings(self, renderer): """Set basic settings based on renderer. From 0bc8ad9694c32eb096b67e1fc2dce335b53b9d1c Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 00:52:54 +0300 Subject: [PATCH 095/785] change placement of redshift settings function --- openpype/hosts/maya/api/render_settings.py | 13 +++++++++++++ openpype/hosts/maya/plugins/create/create_render.py | 13 ------------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 2614ca23e2..6c741046ed 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -113,6 +113,19 @@ class RenderSettings(object): self._set_global_output_settings() + def _set_redshift_settings(self): + """Sets settings for Arnold.""" + + img_ext = self.redshift_renderer.get("image_format") + self._set_global_output_settings() + # Resolution + resWidth = self.attributes.get("resolutionWidth") + resHeight = self.attributes.get("resolutionHeight") + + cmds.setAttr("redshiftOptions.imageFormat", img_ext) + cmds.setAttr("defaultResolution.width", resWidth) + cmds.setAttr("defaultResolution.height", resHeight) + def _set_vray_settings(self, aov_separator, width, height): # type: (str, int, int) -> None """Sets important settings for Vray.""" diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index e95e39e975..3ab83c5143 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -413,19 +413,6 @@ class CreateRender(plugin.Creator): if renderer == "redshift": self._set_redshift_settings() - def _set_redshift_settings(self): - """Sets settings for Arnold.""" - - img_ext = self.redshift_renderer.get("image_format") - self._set_global_output_settings() - # Resolution - resWidth = self.attributes.get("resolutionWidth") - resHeight = self.attributes.get("resolutionHeight") - - cmds.setAttr("redshiftOptions.imageFormat", img_ext) - cmds.setAttr("defaultResolution.width", resWidth) - cmds.setAttr("defaultResolution.height", resHeight) - def _set_vray_settings(self, asset): # type: (dict) -> None """Sets important settings for Vray.""" From 71434dee8b500bdabc7073535943b9fbdf047558 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 10:48:28 +0300 Subject: [PATCH 096/785] remove unused refactored function --- 
openpype/hosts/maya/plugins/create/create_render.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 3ab83c5143..93f305f3b9 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -456,11 +456,3 @@ class CreateRender(plugin.Creator): cmds.setAttr( "{}.height".format(node), asset["data"].get("resolutionHeight")) - - @staticmethod - def _set_global_output_settings(): - # enable animation - cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) - cmds.setAttr("defaultRenderGlobals.animation", 1) - cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) - cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) From 50e60acc22e297238be281984bbf871ceadf9ddd Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 10:57:30 +0300 Subject: [PATCH 097/785] removed unused refactored vray settings func --- .../maya/plugins/create/create_render.py | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 93f305f3b9..815b2a6b0f 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -412,47 +412,3 @@ class CreateRender(plugin.Creator): self._set_vray_settings(asset) if renderer == "redshift": self._set_redshift_settings() - - def _set_vray_settings(self, asset): - # type: (dict) -> None - """Sets important settings for Vray.""" - settings = cmds.ls(type="VRaySettingsNode") - node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - - # set separator - # set it in vray menu - if cmds.optionMenuGrp("vrayRenderElementSeparator", exists=True, - q=True): - items = cmds.optionMenuGrp( - "vrayRenderElementSeparator", ill=True, query=True) - - separators = [cmds.menuItem(i, label=True, query=True) for i in items] # noqa: E501 - try: - sep_idx = separators.index(self.aov_separator) - except ValueError: - raise CreatorError( - "AOV character {} not in {}".format( - self.aov_separator, separators)) - - cmds.optionMenuGrp( - "vrayRenderElementSeparator", sl=sep_idx + 1, edit=True) - cmds.setAttr( - "{}.fileNameRenderElementSeparator".format(node), - self.aov_separator, - type="string" - ) - # set format to exr - cmds.setAttr( - "{}.imageFormatStr".format(node), "exr", type="string") - - # animType - cmds.setAttr( - "{}.animType".format(node), 1) - - # resolution - cmds.setAttr( - "{}.width".format(node), - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "{}.height".format(node), - asset["data"].get("resolutionHeight")) From 6f58d72be5e5f23ecdc151eed1d135edd750b5e0 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 11:18:40 +0300 Subject: [PATCH 098/785] add/cleanup refactored redshift settings function --- openpype/hosts/maya/api/render_settings.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 6c741046ed..75646b858b 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -104,14 +104,7 @@ class RenderSettings(object): self._set_vray_settings(aov_separator, width, height) if renderer == "redshift": - # set format to exr - cmds.setAttr("RedshiftOptions.imageFormat", 1) - - # resolution - 
cmds.setAttr("defaultResolution.width", width) - cmds.setAttr("defaultResolution.height", height) - - self._set_global_output_settings() + self._set_redshift_settings() def _set_redshift_settings(self): """Sets settings for Arnold.""" From 105fb3e377e207f32f385fee4e03bcc363ff0b4a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 11:19:07 +0300 Subject: [PATCH 099/785] remove refactored default renderer settings func --- .../maya/plugins/create/create_render.py | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 815b2a6b0f..97f059077f 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -392,23 +392,3 @@ class CreateRender(plugin.Creator): if "verify" not in kwargs: kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) return requests.get(*args, **kwargs) - - def _set_default_renderer_settings(self, renderer): - """Set basic settings based on renderer. - - Args: - renderer (str): Renderer name. - - """ - prefix = self._image_prefixes[renderer] - prefix = prefix.replace("{aov_separator}", self.aov_separator) - cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") - - asset = get_asset() - - if renderer == "vray": - self._set_vray_settings(asset) - if renderer == "redshift": - self._set_redshift_settings() From 2cd42298e9b51d8323c1b4a023d74115ebf62bc0 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 11:27:57 +0300 Subject: [PATCH 100/785] remove unused import --- openpype/hosts/maya/plugins/create/create_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 97f059077f..f6e75c825c 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -15,7 +15,6 @@ from avalon.api import Session from openpype.api import ( get_system_settings, get_project_settings, - get_asset, ) from openpype.hosts.maya.api import ( lib, From 8a970b123c1697d7db28f31debf4f7113c3c3177 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 21 Apr 2022 11:24:49 +0200 Subject: [PATCH 101/785] Use logic directly from Sync Server module --- openpype/plugins/publish/integrate_new.py | 165 +--------------------- 1 file changed, 6 insertions(+), 159 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 2795b59482..cc6856e407 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -4,14 +4,13 @@ import sys import copy import clique import six -from collections import deque, defaultdict from bson.objectid import ObjectId from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api from avalon import io import openpype.api -from datetime import datetime +from openpype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction @@ -299,11 +298,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("Retrieving Representation Site Sync information ...") # Get the accessible sites for Site Sync - sites = SiteSync.compute_resource_sync_sites( - system_settings=instance.context.data["system_settings"], - 
project_settings=instance.context.data["project_settings"] + manager = ModulesManager() + sync_server_module = manager.modules_by_name["sync_server"] + sites = sync_server_module.compute_resource_sync_sites( + project_name=instance.data["projectEntity"]["name"] ) - self.log.debug("Site Sync Sites: {}".format(sites)) + self.log.debug("Sync Server Sites: {}".format(sites)) # Compute the resource file infos once (files belonging to the # version instance instead of an individual representation) so @@ -828,156 +828,3 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "hash": openpype.api.source_hash(path), "sites": sites } - - -class SiteSync(object): - """Logic for Site Sync Module functionality""" - - @classmethod - def compute_resource_sync_sites(cls, - system_settings, - project_settings): - """Get available resource sync sites""" - - def create_metadata(name, created=True): - """Create sync site metadata for site with `name`""" - metadata = {"name": name} - if created: - metadata["created_dt"] = datetime.now() - return metadata - - default_sites = [create_metadata("studio")] - - # If sync site module is disabled return default fallback site - system_sync_server_presets = system_settings["modules"]["sync_server"] - log.debug("system_sett:: {}".format(system_sync_server_presets)) - if not system_sync_server_presets["enabled"]: - return default_sites - - # If sync site module is disabled in current - # project return default fallback site - sync_project_presets = project_settings["global"]["sync_server"] - if not sync_project_presets["enabled"]: - return default_sites - - local_site, remote_site = cls._get_sites(sync_project_presets) - - # Attached sites metadata by site name - # That is the local site, remote site, the always accesible sites - # and their alternate sites (alias of sites with different protocol) - attached_sites = dict() - attached_sites[local_site] = create_metadata(local_site) - - if remote_site and remote_site != local_site: - attached_sites[remote_site] = create_metadata(remote_site, - created=False) - - # add alternative sites - cls._add_alternative_sites(system_sync_server_presets, attached_sites) - - # add skeleton for sites where it should be always synced to - always_accessible_sites = ( - sync_project_presets["config"].get("always_accessible_on", []) - ) - for site in set(always_accessible_sites): - site = site.strip() - if site not in attached_sites: - attached_sites[site] = create_metadata(site, created=False) - - return list(attached_sites.values()) - - @staticmethod - def _get_sites(sync_project_presets): - """Returns tuple (local_site, remote_site)""" - local_site_id = openpype.api.get_local_site_id() - local_site = sync_project_presets["config"]. \ - get("active_site", "studio").strip() - - if local_site == 'local': - local_site = local_site_id - - remote_site = sync_project_presets["config"].get("remote_site") - if remote_site: - remote_site.strip() - - if remote_site == 'local': - remote_site = local_site_id - - return local_site, remote_site - - @classmethod - def _add_alternative_sites(cls, - system_sync_server_presets, - attached_sites): - """Loop through all configured sites and add alternatives. - - For all sites if an alternative site is detected that has an - accessible site then we can also register to that alternative site - with the same "created" state. So we match the existing data. 
- - See SyncServerModule.handle_alternate_site - """ - conf_sites = system_sync_server_presets.get("sites", {}) - alt_site_pairs = cls._get_alt_site_pairs(conf_sites) - - for site_name, alt_sites in alt_site_pairs.items(): - - # Skip if already defined - if site_name in attached_sites: - continue - - # If no alternative sites we don't need to add - if not alt_sites: - continue - - # Take a copy of data of the first alternate site that is already - # defined as an attached site to match the same state. - match_meta = next((attached_sites[site] for site in alt_sites - if site in attached_sites), None) - if not match_meta: - continue - - alt_site_meta = copy.deepcopy(match_meta) - alt_site_meta["name"] = site_name - - # Note: We change mutable `attached_site` dict in-place - attached_sites[site_name] = alt_site_meta - - @staticmethod - def _get_alt_site_pairs(conf_sites): - """Returns dict of site and its alternative sites. - If `site` has alternative site, it means that alt_site has - 'site' as - alternative site - Args: - conf_sites (dict) - Returns: - (dict): {'site': [alternative sites]...} - """ - alt_site_pairs = defaultdict(set) - for site_name, site_info in conf_sites.items(): - alt_sites = set(site_info.get("alternative_sites", [])) - alt_site_pairs[site_name].update(alt_sites) - - for alt_site in alt_sites: - alt_site_pairs[alt_site].add(site_name) - - for site_name, alt_sites in alt_site_pairs.items(): - sites_queue = deque(alt_sites) - while sites_queue: - alt_site = sites_queue.popleft() - - # safety against wrong config - # {"SFTP": {"alternative_site": "SFTP"} - if alt_site == site_name or alt_site not in alt_site_pairs: - continue - - for alt_alt_site in alt_site_pairs[alt_site]: - if ( - alt_alt_site != site_name - and alt_alt_site not in alt_sites - ): - alt_sites.add(alt_alt_site) - sites_queue.append(alt_alt_site) - - return alt_site_pairs From ae1acb950bbb69b203c36f19d40e3952eca46bfd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 21 Apr 2022 14:08:53 +0200 Subject: [PATCH 102/785] Fix: refactor to use correct function --- openpype/plugins/publish/integrate_new.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index cc6856e407..419e2b4e4b 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -309,9 +309,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # version instance instead of an individual representation) so # we can re-use those file infos per representation anatomy = instance.context.data["anatomy"] - resource_file_infos = self.prepare_file_info(resource_destinations, - sites=sites, - anatomy=anatomy) + resource_file_infos = self.get_files_info(resource_destinations, + sites=sites, + anatomy=anatomy) # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources From 54ff5a8e53449bf4ae895e4ddb142dbe25b0fe95 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 20:17:40 +0300 Subject: [PATCH 103/785] remove extra import --- openpype/hosts/maya/plugins/create/create_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index f6e75c825c..86276c3f77 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -24,7 +24,6 @@ from 
openpype.hosts.maya.api import ( from openpype.modules import ModulesManager from openpype.pipeline import CreatorError -from avalon.api import Session class CreateRender(plugin.Creator): """Create *render* instance. From 2867a2f2a0da67112c5a893eddb131fcc3ee6832 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 20:18:10 +0300 Subject: [PATCH 104/785] fix redshift comment --- openpype/hosts/maya/api/render_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 75646b858b..1a2064986e 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -107,7 +107,7 @@ class RenderSettings(object): self._set_redshift_settings() def _set_redshift_settings(self): - """Sets settings for Arnold.""" + """Sets settings for Redshift.""" img_ext = self.redshift_renderer.get("image_format") self._set_global_output_settings() From 675c7a000601257fbfad78eb2339377e767d226d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 22:30:07 +0300 Subject: [PATCH 105/785] replace avalon CreatorError with OP's impl. --- openpype/hosts/maya/api/render_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/render_settings.py index 1a2064986e..48026e1510 100644 --- a/openpype/hosts/maya/api/render_settings.py +++ b/openpype/hosts/maya/api/render_settings.py @@ -5,7 +5,7 @@ from openpype.api import ( get_asset) from avalon.api import Session -from avalon.api import CreatorError +from openpype.pipeline import CreatorError class RenderSettings(object): From 74cc8230ea01c27277f5e61414fd3d7b3ec3f81d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 22:31:43 +0300 Subject: [PATCH 106/785] remove unused import --- openpype/hosts/maya/plugins/create/create_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 86276c3f77..bdd1844b5e 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -22,7 +22,6 @@ from openpype.hosts.maya.api import ( render_settings ) from openpype.modules import ModulesManager -from openpype.pipeline import CreatorError class CreateRender(plugin.Creator): From 4d4ca196f7808a5007893136dcf5d4c82d84cf21 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 21 Apr 2022 23:44:08 +0300 Subject: [PATCH 107/785] remove redundant code --- openpype/hosts/maya/plugins/publish/collect_render.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 839ead8bd6..ab7b7a78ac 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -325,10 +325,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): if instance.data['family'] == "workfile": instance.data["version"] = context.data["version"] - # Apply each user defined attribute as data - for attr, value in avalon.maya.read(layer).items(): - data[attr] = value - # handle standalone renderers if render_instance.data.get("vrayScene") is True: data["families"].append("vrayscene_render") From 9faa7e0b618f06af28081a521fe4cec4753092d5 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:03:37 +0300 
Subject: [PATCH 108/785] Rename file to match convention. --- .../hosts/maya/api/{render_settings.py => lib_rendersettings.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/maya/api/{render_settings.py => lib_rendersettings.py} (100%) diff --git a/openpype/hosts/maya/api/render_settings.py b/openpype/hosts/maya/api/lib_rendersettings.py similarity index 100% rename from openpype/hosts/maya/api/render_settings.py rename to openpype/hosts/maya/api/lib_rendersettings.py From 24a1dea3eb8688feec1ae5379d9708fe2f94d95f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:04:19 +0300 Subject: [PATCH 109/785] Append render settings schema. --- .../projects_schema/schema_project_maya.json | 4 + .../schemas/schema_maya_render_settings.json | 411 ++++++++++++++++++ 2 files changed, 415 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index cc70516c72..76a235bc12 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -51,6 +51,10 @@ "type": "schema", "name": "schema_maya_scriptsmenu" }, + { + "type": "schema", + "name": "schema_maya_render_settings" + }, { "type": "schema", "name": "schema_maya_create" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json new file mode 100644 index 0000000000..62e9c9e461 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -0,0 +1,411 @@ +{ + "type": "dict", + "collapsible": true, + "key": "RenderSettings", + "label": "Render Settings", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "default_render_image_folder", + "label": "Default render image folder" + }, + { + "key": "aov_separator", + "label": "AOV Separator character", + "type": "enum", + "multiselection": false, + "default": "underscore", + "enum_items": [ + {"dash": "- (dash)"}, + {"underscore": "_ (underscore)"}, + {"dot": ". 
(dot)"} + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "arnold_renderer", + "label": "Arnold Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"jpeg": "jpeg"}, + {"png": "png"}, + {"deepexr": "deep exr"}, + {"tif": "tif"}, + {"exr": "exr"}, + {"maya": "maya"}, + {"mtoa_shaders": "mtoa_shaders"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "tiled", + "label": "Tiled (tif, exr)", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"ID": "ID"}, + {"N": "N"}, + {"P": "P"}, + {"Pref": "Pref"}, + {"RGBA": "RGBA"}, + {"Z": "Z"}, + {"albedo": "albedo"}, + {"background": "background"}, + {"coat": "coat"}, + {"coat_albedo": "coat_albedo"}, + {"coat_direct": "coat_direct"}, + {"coat_indirect": "coat_indirect"}, + {"cputime": "cputime"}, + {"crypto_asset": "crypto_asset"}, + {"crypto_material": "cypto_material"}, + {"crypto_object": "crypto_object"}, + {"diffuse": "diffuse"}, + {"diffuse_albedo": "diffuse_albedo"}, + {"diffuse_direct": "diffuse_direct"}, + {"diffuse_indirect": "diffuse_indirect"}, + {"direct": "direct"}, + {"emission": "emission"}, + {"highlight": "highlight"}, + {"indirect": "indirect"}, + {"motionvector": "motionvector"}, + {"opacity": "opacity"}, + {"raycount": "raycount"}, + {"rim_light": "rim_light"}, + {"shadow": "shadow"}, + {"shadow_diff": "shadow_diff"}, + {"shadow_mask": "shadow_mask"}, + {"shadow_matte": "shadow_matte"}, + {"sheen": "sheen"}, + {"sheen_albedo": "sheen_albedo"}, + {"sheen_direct": "sheen_direct"}, + {"sheen_indirect": "sheen_indirect"}, + {"specular": "specular"}, + {"specular_albedo": "specular_albedo"}, + {"specular_direct": "specular_direct"}, + {"specular_indirect": "specular_indirect"}, + {"sss": "sss"}, + {"sss_albedo": "sss_albedo"}, + {"sss_direct": "sss_direct"}, + {"sss_indirect": "sss_indirect"}, + {"transmission": "transmission"}, + {"transmission_albedo": "transmission_albedo"}, + {"transmission_direct": "transmission_direct"}, + {"transmission_indirect": "transmission_indirect"}, + {"volume": "volume"}, + {"volume_Z": "volume_Z"}, + {"volume_albedo": "volume_albedo"}, + {"volume_direct": "volume_direct"}, + {"volume_indirect": "volume_indirect"}, + {"volume_opacity": "volume_opacity"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like AASamples" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "vray_renderer", + "label": "V-Ray Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "engine", + "label": "Production Engine", + "type": "enum", + "multiselection": false, + "defaults": "1", + "enum_items": [ + {"1": "V-Ray"}, + {"2": "V-Ray GPU"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"png": "png"}, + {"jpg": "jpg"}, + {"vrimg": "vrimg"}, + {"hdr": "hdr"}, 
+ {"exr": "exr"}, + {"exr (multichannel)": "exr (multichannel)"}, + {"exr (deep)": "exr (deep)"}, + {"tga": "tga"}, + {"bmp": "bmp"}, + {"sgi": "sgi"} + ] + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"atmosphereChannel": "atmosphere"}, + {"backgroundChannel": "background"}, + {"bumpNormalsChannel": "bumpnormals"}, + {"causticsChannel": "caustics"}, + {"coatFilterChannel": "coat_filter"}, + {"coatGlossinessChannel": "coatGloss"}, + {"coatReflectionChannel": "coat_reflection"}, + {"vrayCoatChannel": "coat_specular"}, + {"CoverageChannel": "coverage"}, + {"cryptomatteChannel": "cryptomatte"}, + {"customColor": "custom_color"}, + {"drBucketChannel": "DR"}, + {"denoiserChannel": "denoiser"}, + {"diffuseChannel": "diffuse"}, + {"ExtraTexElement": "extraTex"}, + {"giChannel": "GI"}, + {"LightMixElement": "None"}, + {"lightingChannel": "lighting"}, + {"LightingAnalysisChannel": "LightingAnalysis"}, + {"materialIDChannel": "materialID"}, + {"MaterialSelectElement": "materialSelect"}, + {"matteShadowChannel": "matteShadow"}, + {"MultiMatteElement": "multimatte"}, + {"multimatteIDChannel": "multimatteID"}, + {"normalsChannel": "normals"}, + {"nodeIDChannel": "objectId"}, + {"objectSelectChannel": "objectSelect"}, + {"rawCoatFilterChannel": "raw_coat_filter"}, + {"rawCoatReflectionChannel": "raw_coat_reflection"}, + {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, + {"rawGiChannel": "rawGI"}, + {"rawLightChannel": "rawLight"}, + {"rawReflectionChannel": "rawReflection"}, + {"rawReflectionFilterChannel": "rawReflectionFilter"}, + {"rawRefractionChannel": "rawRefraction"}, + {"rawRefractionFilterChannel": "rawRefractionFilter"}, + {"rawShadowChannel": "rawShadow"}, + {"rawSheenFilterChannel": "raw_sheen_filter"}, + {"rawSheenReflectionChannel": "raw_sheen_reflection"}, + {"rawTotalLightChannel": "rawTotalLight"}, + {"reflectIORChannel": "reflIOR"}, + {"reflectChannel": "reflect"}, + {"reflectionFilterChannel": "reflectionFilter"}, + {"reflectGlossinessChannel": "reflGloss"}, + {"refractChannel": "refract"}, + {"refractionFilterChannel": "refractionFilter"}, + {"refractGlossinessChannel": "refrGloss"}, + {"renderIDChannel": "renderId"}, + {"FastSSS2Channel": "SSS"}, + {"sampleRateChannel": "sampleRate"}, + {"samplerInfo": "samplerInfo"}, + {"selfIllumChannel": "selfIllum"}, + {"shadowChannel": "shadow"}, + {"sheenFilterChannel": "sheen_filter"}, + {"sheenGlossinessChannel": "sheenGloss"}, + {"sheenReflectionChannel": "sheen_reflection"}, + {"vraySheenChannel": "sheen_specular"}, + {"specularChannel": "specular"}, + {"Toon": "Toon"}, + {"toonLightingChannel": "toonLighting"}, + {"toonSpecularChannel": "toonSpecular"}, + {"totalLightChannel": "totalLight"}, + {"unclampedColorChannel": "unclampedColor"}, + {"VRScansPaintMaskChannel": "VRScansPaintMask"}, + {"VRScansZoneMaskChannel": "VRScansZoneMask"}, + {"velocityChannel": "velocity"}, + {"zdepthChannel": "zDepth"}, + {"LightSelectElement": "lightselect"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like aaFilterSize" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "redshift_renderer", + "label": "Redshift Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + 
"label": "Image prefix template", + "type": "text" + }, + { + "key": "primary_gi_engine", + "label": "Primary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "secondary_gi_engine", + "label": "Secondary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"iff": "Maya IFF"}, + {"exr": "OpenEXR"}, + {"tif": "TIFF"}, + {"png": "PNG"}, + {"tga": "Targa"}, + {"jpg": "JPEG"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "force_combine", + "label": "Force combine beauty and AOVs", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< none >"}, + {"AO": "Ambient Occlusion"}, + {"Background": "Background"}, + {"Beauty": "Beauty"}, + {"BumpNormals": "Bump Normals"}, + {"Caustics": "Caustics"}, + {"CausticsRaw": "Caustics Raw"}, + {"Cryptomatte": "Cryptomatte"}, + {"Custom": "Custom"}, + {"Z": "Depth"}, + {"DiffuseFilter": "Diffuse Filter"}, + {"DiffuseLighting": "Diffuse Lighting"}, + {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, + {"Emission": "Emission"}, + {"GI": "Global Illumination"}, + {"GIRaw": "Global Illumination Raw"}, + {"Matte": "Matte"}, + {"MotionVectors": "Ambient Occlusion"}, + {"N": "Normals"}, + {"ID": "ObjectID"}, + {"ObjectBumpNormal": "Object-Space Bump Normals"}, + {"ObjectPosition": "Object-Space Positions"}, + {"PuzzleMatte": "Puzzle Matte"}, + {"Reflections": "Reflections"}, + {"ReflectionsFilter": "Reflections Filter"}, + {"ReflectionsRaw": "Reflections Raw"}, + {"Refractions": "Refractions"}, + {"RefractionsFilter": "Refractions Filter"}, + {"RefractionsRaw": "Refractions Filter"}, + {"Shadows": "Shadows"}, + {"SpecularLighting": "Specular Lighting"}, + {"SSS": "Sub Surface Scatter"}, + {"SSSRaw": "Sub Surface Scatter Raw"}, + {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, + {"TotalTransLightingRaw": "Total Translucency Filter"}, + {"TransTint": "Translucency Filter"}, + {"TransGIRaw": "Translucency Lighting Raw"}, + {"VolumeFogEmission": "Volume Fog Emission"}, + {"VolumeFogTint": "Volume Fog Tint"}, + {"VolumeLighting": "Volume Lighting"}, + {"P": "World Position"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like reflectionMaxTraceDepth" + }, + { + "type": "dict-modifiable", + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + } + ] +} \ No newline at end of file From c853e8440f81123b089b8f329ce6b179a703d8ac Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:11:23 +0300 Subject: [PATCH 110/785] Add comment about pools --- openpype/hosts/maya/plugins/create/create_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index bdd1844b5e..b718bbfa9c 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ 
b/openpype/hosts/maya/plugins/create/create_render.py @@ -18,8 +18,8 @@ from openpype.api import ( ) from openpype.hosts.maya.api import ( lib, - plugin, - render_settings + lib_rendersettings, + plugin ) from openpype.modules import ModulesManager @@ -158,7 +158,7 @@ class CreateRender(plugin.Creator): collection.getSelector().setPattern('*') self.log.info("Applying default render settings..") - render_settings.RenderSettings.apply_defaults() + lib_rendersettings.RenderSettings.apply_defaults() return self.instance def _deadline_webservice_changed(self): @@ -209,7 +209,7 @@ class CreateRender(plugin.Creator): def _create_render_settings(self): """Create instance settings.""" - # get pools + # get pools (slave machines of the render farm) pool_names = [] default_priority = 50 From b5004aeaa5696a14d745d8e93c1113de2bc3b8cc Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:13:23 +0300 Subject: [PATCH 111/785] Add comment about pool_names source --- openpype/hosts/maya/plugins/create/create_render.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index b718bbfa9c..e431eb2bf1 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -253,7 +253,8 @@ class CreateRender(plugin.Creator): # if 'default' server is not between selected, # use first one for initial list of pools. deadline_url = next(iter(self.deadline_servers.values())) - + # Uses function to get pool machines from the assigned deadline + # url in settings pool_names = self._get_deadline_pools(deadline_url) maya_submit_dl = self._project_settings.get( "deadline", {}).get( From 05ed9c5c5396dca022785215c73db3ff8ef6452e Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:19:39 +0300 Subject: [PATCH 112/785] Redshift function cleanup. --- openpype/hosts/maya/api/lib_rendersettings.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 48026e1510..887cbc775e 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -106,18 +106,14 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings() - def _set_redshift_settings(self): + def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" img_ext = self.redshift_renderer.get("image_format") self._set_global_output_settings() - # Resolution - resWidth = self.attributes.get("resolutionWidth") - resHeight = self.attributes.get("resolutionHeight") - cmds.setAttr("redshiftOptions.imageFormat", img_ext) - cmds.setAttr("defaultResolution.width", resWidth) - cmds.setAttr("defaultResolution.height", resHeight) + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) def _set_vray_settings(self, aov_separator, width, height): # type: (str, int, int) -> None From 5969124fbc059f9f6d42db866c5f5a02383e2d4e Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:21:08 +0300 Subject: [PATCH 113/785] Arnold function cleanup. 
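For context on this cleanup and the Redshift one just above: both setters stop querying the asset resolution themselves and instead receive width and height from their caller, so the lookup happens only once. A minimal sketch of that intended call pattern, assuming an initialized RenderSettings instance and an asset document that carries resolution data (the names below are illustrative; the actual dispatch is wired up in later commits of this series):

    # Illustrative only; mirrors where this refactor is heading, not final code.
    from maya import cmds

    def apply_renderer_resolution(render_settings, asset_doc):
        # Resolve the asset resolution once ...
        width = asset_doc["data"].get("resolutionWidth")
        height = asset_doc["data"].get("resolutionHeight")
        # ... then hand it to whichever renderer-specific setter applies.
        renderer = cmds.getAttr("defaultRenderGlobals.currentRenderer").lower()
        if renderer == "arnold":
            render_settings._set_Arnold_settings(width, height)
        elif renderer == "redshift":
            render_settings._set_redshift_settings(width, height)
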
--- openpype/hosts/maya/api/lib_rendersettings.py | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 887cbc775e..13ab0ae6cb 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -54,20 +54,6 @@ class RenderSettings(object): render_settings = RenderSettings(project_settings) render_settings.set_default_renderer_settings(renderer) - def _set_Arnold_settings(self): - """Sets settings for Arnold.""" - - img_ext = self.arnold_renderer.get("image_format") - self._set_global_output_settings() - # Resolution - resWidth = self.attributes.get("resolutionWidth") - resHeight = self.attributes.get("resolutionHeight") - - cmds.setAttr("defaultArnoldDriver.ai_translator", - img_ext, type="string") - cmds.setAttr("defaultResolution.width", resWidth) - cmds.setAttr("defaultResolution.height", resHeight) - def set_default_renderer_settings(self, renderer): """Set basic settings based on renderer. @@ -106,6 +92,16 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings() + def _set_Arnold_settings(self, width, height): + """Sets settings for Arnold.""" + + img_ext = self.arnold_renderer.get("image_format") + self._set_global_output_settings() + cmds.setAttr("defaultArnoldDriver.ai_translator", + img_ext, type="string") + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" From 47e70d33c79787d88d912daa771a007ca0ef101d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 00:31:03 +0300 Subject: [PATCH 114/785] add comment about vray file format setting --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 13ab0ae6cb..5e0d40e6f9 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -142,7 +142,7 @@ class RenderSettings(object): type="string" ) - # set format to exr + # Set render file format to exr cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string") # animType From b902b2a7e96008268d0f87493265195297b679f2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 22 Apr 2022 07:55:49 +0300 Subject: [PATCH 115/785] Remove unnecessary checkbox --- .../projects_schema/schemas/schema_maya_render_settings.json | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 62e9c9e461..2f8b9562bf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -3,7 +3,6 @@ "collapsible": true, "key": "RenderSettings", "label": "Render Settings", - "checkbox_key": "enabled", "children": [ { "type": "boolean", From 61b59ef2c4b6f1af5fa4eedc7c1ab361cf603d70 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 09:09:38 +0300 Subject: [PATCH 116/785] add checkbox to render settings to apply render settings on creation --- .../projects_schema/schemas/schema_maya_render_settings.json | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 2f8b9562bf..8a5730fbef 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -6,8 +6,8 @@ "children": [ { "type": "boolean", - "key": "enabled", - "label": "Enabled" + "key": "apply_render_settings", + "label": "Apply Render Settings on creation" }, { "type": "text", From 998eb0ee762700c80ca675cbfe3db9d6d8f0e1dd Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 09:12:55 +0300 Subject: [PATCH 117/785] remove redundant schema settings --- .../schemas/schema_maya_create_render.json | 397 ------------------ 1 file changed, 397 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json index f4a724cd5c..68ad7ad63d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json @@ -15,403 +15,6 @@ "key": "defaults", "label": "Default Subsets", "object_type": "text" - }, - { - "type": "text", - "key": "default_render_image_folder", - "label": "Default render image folder" - }, - { - "key": "aov_separator", - "label": "AOV Separator character", - "type": "enum", - "multiselection": false, - "default": "underscore", - "enum_items": [ - {"dash": "- (dash)"}, - {"underscore": "_ (underscore)"}, - {"dot": ". 
(dot)"} - ] - }, - { - "type": "dict", - "collapsible": true, - "key": "arnold_renderer", - "label": "Arnold Renderer", - "is_group": true, - "children": [ - { - "key": "image_prefix", - "label": "Image prefix template", - "type": "text" - }, - { - "key": "image_format", - "label": "Output Image Format", - "type": "enum", - "multiselection": false, - "defaults": "exr", - "enum_items": [ - {"jpeg": "jpeg"}, - {"png": "png"}, - {"deepexr": "deep exr"}, - {"tif": "tif"}, - {"exr": "exr"}, - {"maya": "maya"}, - {"mtoa_shaders": "mtoa_shaders"} - ] - }, - { - "key": "multilayer_exr", - "label": "Multilayer (exr)", - "type": "boolean" - }, - { - "key": "tiled", - "label": "Tiled (tif, exr)", - "type": "boolean" - }, - { - "key": "aov_list", - "label": "AOVs to create", - "type": "enum", - "multiselection": true, - "defaults": "empty", - "enum_items": [ - {"empty": "< empty >"}, - {"ID": "ID"}, - {"N": "N"}, - {"P": "P"}, - {"Pref": "Pref"}, - {"RGBA": "RGBA"}, - {"Z": "Z"}, - {"albedo": "albedo"}, - {"background": "background"}, - {"coat": "coat"}, - {"coat_albedo": "coat_albedo"}, - {"coat_direct": "coat_direct"}, - {"coat_indirect": "coat_indirect"}, - {"cputime": "cputime"}, - {"crypto_asset": "crypto_asset"}, - {"crypto_material": "cypto_material"}, - {"crypto_object": "crypto_object"}, - {"diffuse": "diffuse"}, - {"diffuse_albedo": "diffuse_albedo"}, - {"diffuse_direct": "diffuse_direct"}, - {"diffuse_indirect": "diffuse_indirect"}, - {"direct": "direct"}, - {"emission": "emission"}, - {"highlight": "highlight"}, - {"indirect": "indirect"}, - {"motionvector": "motionvector"}, - {"opacity": "opacity"}, - {"raycount": "raycount"}, - {"rim_light": "rim_light"}, - {"shadow": "shadow"}, - {"shadow_diff": "shadow_diff"}, - {"shadow_mask": "shadow_mask"}, - {"shadow_matte": "shadow_matte"}, - {"sheen": "sheen"}, - {"sheen_albedo": "sheen_albedo"}, - {"sheen_direct": "sheen_direct"}, - {"sheen_indirect": "sheen_indirect"}, - {"specular": "specular"}, - {"specular_albedo": "specular_albedo"}, - {"specular_direct": "specular_direct"}, - {"specular_indirect": "specular_indirect"}, - {"sss": "sss"}, - {"sss_albedo": "sss_albedo"}, - {"sss_direct": "sss_direct"}, - {"sss_indirect": "sss_indirect"}, - {"transmission": "transmission"}, - {"transmission_albedo": "transmission_albedo"}, - {"transmission_direct": "transmission_direct"}, - {"transmission_indirect": "transmission_indirect"}, - {"volume": "volume"}, - {"volume_Z": "volume_Z"}, - {"volume_albedo": "volume_albedo"}, - {"volume_direct": "volume_direct"}, - {"volume_indirect": "volume_indirect"}, - {"volume_opacity": "volume_opacity"} - ] - }, - { - "type": "label", - "label": "Add additional options - put attribute and value, like AASamples" - }, - { - "type": "dict-modifiable", - "key": "additional_options", - "label": "Additional Renderer Options", - "use_label_wrap": true, - "object_type": { - "type": "text" - } - } - ] - }, - { - "type": "dict", - "collapsible": true, - "key": "vray_renderer", - "label": "V-Ray Renderer", - "is_group": true, - "children": [ - { - "key": "image_prefix", - "label": "Image prefix template", - "type": "text" - }, - { - "key": "engine", - "label": "Production Engine", - "type": "enum", - "multiselection": false, - "defaults": "1", - "enum_items": [ - {"1": "V-Ray"}, - {"2": "V-Ray GPU"} - ] - }, - { - "key": "image_format", - "label": "Output Image Format", - "type": "enum", - "multiselection": false, - "defaults": "exr", - "enum_items": [ - {"png": "png"}, - {"jpg": "jpg"}, - {"vrimg": "vrimg"}, - {"hdr": "hdr"}, 
- {"exr": "exr"}, - {"exr (multichannel)": "exr (multichannel)"}, - {"exr (deep)": "exr (deep)"}, - {"tga": "tga"}, - {"bmp": "bmp"}, - {"sgi": "sgi"} - ] - }, - { - "key": "aov_list", - "label": "AOVs to create", - "type": "enum", - "multiselection": true, - "defaults": "empty", - "enum_items": [ - {"empty": "< empty >"}, - {"atmosphereChannel": "atmosphere"}, - {"backgroundChannel": "background"}, - {"bumpNormalsChannel": "bumpnormals"}, - {"causticsChannel": "caustics"}, - {"coatFilterChannel": "coat_filter"}, - {"coatGlossinessChannel": "coatGloss"}, - {"coatReflectionChannel": "coat_reflection"}, - {"vrayCoatChannel": "coat_specular"}, - {"CoverageChannel": "coverage"}, - {"cryptomatteChannel": "cryptomatte"}, - {"customColor": "custom_color"}, - {"drBucketChannel": "DR"}, - {"denoiserChannel": "denoiser"}, - {"diffuseChannel": "diffuse"}, - {"ExtraTexElement": "extraTex"}, - {"giChannel": "GI"}, - {"LightMixElement": "None"}, - {"lightingChannel": "lighting"}, - {"LightingAnalysisChannel": "LightingAnalysis"}, - {"materialIDChannel": "materialID"}, - {"MaterialSelectElement": "materialSelect"}, - {"matteShadowChannel": "matteShadow"}, - {"MultiMatteElement": "multimatte"}, - {"multimatteIDChannel": "multimatteID"}, - {"normalsChannel": "normals"}, - {"nodeIDChannel": "objectId"}, - {"objectSelectChannel": "objectSelect"}, - {"rawCoatFilterChannel": "raw_coat_filter"}, - {"rawCoatReflectionChannel": "raw_coat_reflection"}, - {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, - {"rawGiChannel": "rawGI"}, - {"rawLightChannel": "rawLight"}, - {"rawReflectionChannel": "rawReflection"}, - {"rawReflectionFilterChannel": "rawReflectionFilter"}, - {"rawRefractionChannel": "rawRefraction"}, - {"rawRefractionFilterChannel": "rawRefractionFilter"}, - {"rawShadowChannel": "rawShadow"}, - {"rawSheenFilterChannel": "raw_sheen_filter"}, - {"rawSheenReflectionChannel": "raw_sheen_reflection"}, - {"rawTotalLightChannel": "rawTotalLight"}, - {"reflectIORChannel": "reflIOR"}, - {"reflectChannel": "reflect"}, - {"reflectionFilterChannel": "reflectionFilter"}, - {"reflectGlossinessChannel": "reflGloss"}, - {"refractChannel": "refract"}, - {"refractionFilterChannel": "refractionFilter"}, - {"refractGlossinessChannel": "refrGloss"}, - {"renderIDChannel": "renderId"}, - {"FastSSS2Channel": "SSS"}, - {"sampleRateChannel": "sampleRate"}, - {"samplerInfo": "samplerInfo"}, - {"selfIllumChannel": "selfIllum"}, - {"shadowChannel": "shadow"}, - {"sheenFilterChannel": "sheen_filter"}, - {"sheenGlossinessChannel": "sheenGloss"}, - {"sheenReflectionChannel": "sheen_reflection"}, - {"vraySheenChannel": "sheen_specular"}, - {"specularChannel": "specular"}, - {"Toon": "Toon"}, - {"toonLightingChannel": "toonLighting"}, - {"toonSpecularChannel": "toonSpecular"}, - {"totalLightChannel": "totalLight"}, - {"unclampedColorChannel": "unclampedColor"}, - {"VRScansPaintMaskChannel": "VRScansPaintMask"}, - {"VRScansZoneMaskChannel": "VRScansZoneMask"}, - {"velocityChannel": "velocity"}, - {"zdepthChannel": "zDepth"}, - {"LightSelectElement": "lightselect"} - ] - }, - { - "type": "label", - "label": "Add additional options - put attribute and value, like aaFilterSize" - }, - { - "type": "dict-modifiable", - "key": "additional_options", - "label": "Additional Renderer Options", - "use_label_wrap": true, - "object_type": { - "type": "text" - } - } - ] - }, - { - "type": "dict", - "collapsible": true, - "key": "redshift_renderer", - "label": "Redshift Renderer", - "is_group": true, - "children": [ - { - "key": "image_prefix", - 
"label": "Image prefix template", - "type": "text" - }, - { - "key": "primary_gi_engine", - "label": "Primary GI Engine", - "type": "enum", - "multiselection": false, - "defaults": "0", - "enum_items": [ - {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} - ] - }, - { - "key": "secondary_gi_engine", - "label": "Secondary GI Engine", - "type": "enum", - "multiselection": false, - "defaults": "0", - "enum_items": [ - {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} - ] - }, - { - "key": "image_format", - "label": "Output Image Format", - "type": "enum", - "multiselection": false, - "defaults": "exr", - "enum_items": [ - {"iff": "Maya IFF"}, - {"exr": "OpenEXR"}, - {"tif": "TIFF"}, - {"png": "PNG"}, - {"tga": "Targa"}, - {"jpg": "JPEG"} - ] - }, - { - "key": "multilayer_exr", - "label": "Multilayer (exr)", - "type": "boolean" - }, - { - "key": "force_combine", - "label": "Force combine beauty and AOVs", - "type": "boolean" - }, - { - "key": "aov_list", - "label": "AOVs to create", - "type": "enum", - "multiselection": true, - "defaults": "empty", - "enum_items": [ - {"empty": "< none >"}, - {"AO": "Ambient Occlusion"}, - {"Background": "Background"}, - {"Beauty": "Beauty"}, - {"BumpNormals": "Bump Normals"}, - {"Caustics": "Caustics"}, - {"CausticsRaw": "Caustics Raw"}, - {"Cryptomatte": "Cryptomatte"}, - {"Custom": "Custom"}, - {"Z": "Depth"}, - {"DiffuseFilter": "Diffuse Filter"}, - {"DiffuseLighting": "Diffuse Lighting"}, - {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, - {"Emission": "Emission"}, - {"GI": "Global Illumination"}, - {"GIRaw": "Global Illumination Raw"}, - {"Matte": "Matte"}, - {"MotionVectors": "Ambient Occlusion"}, - {"N": "Normals"}, - {"ID": "ObjectID"}, - {"ObjectBumpNormal": "Object-Space Bump Normals"}, - {"ObjectPosition": "Object-Space Positions"}, - {"PuzzleMatte": "Puzzle Matte"}, - {"Reflections": "Reflections"}, - {"ReflectionsFilter": "Reflections Filter"}, - {"ReflectionsRaw": "Reflections Raw"}, - {"Refractions": "Refractions"}, - {"RefractionsFilter": "Refractions Filter"}, - {"RefractionsRaw": "Refractions Filter"}, - {"Shadows": "Shadows"}, - {"SpecularLighting": "Specular Lighting"}, - {"SSS": "Sub Surface Scatter"}, - {"SSSRaw": "Sub Surface Scatter Raw"}, - {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, - {"TotalTransLightingRaw": "Total Translucency Filter"}, - {"TransTint": "Translucency Filter"}, - {"TransGIRaw": "Translucency Lighting Raw"}, - {"VolumeFogEmission": "Volume Fog Emission"}, - {"VolumeFogTint": "Volume Fog Tint"}, - {"VolumeLighting": "Volume Lighting"}, - {"P": "World Position"} - ] - }, - { - "type": "label", - "label": "Add additional options - put attribute and value, like reflectionMaxTraceDepth" - }, - { - "type": "dict-modifiable", - "key": "additional_options", - "label": "Additional Renderer Options", - "use_label_wrap": true, - "object_type": { - "type": "text" - } - } - ] } ] } \ No newline at end of file From 365a6b3990a2c1480eac139cee340ff0580ff58f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 09:32:12 +0300 Subject: [PATCH 118/785] add menu item to OpenPype menu --- openpype/hosts/maya/api/menu.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 5f0fc39bf3..133877a63e 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -11,7 +11,7 @@ import avalon.api from openpype.api import 
BuildWorkfile from openpype.settings import get_project_settings from openpype.tools.utils import host_tools -from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range @@ -99,6 +99,15 @@ def install(): cmds.menuItem(divider=True) + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.set_default_renderer_settings( # noqa + parent=parent_widget + ) + ) + + cmds.menuItem(divider=True) + cmds.menuItem( "Work Files...", command=lambda *args: host_tools.show_workfiles( From a33a9057cd11706391da12dc38f031741459a811 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 09:42:13 +0300 Subject: [PATCH 119/785] modify project settings schema defaults for maya --- .../defaults/project_settings/maya.json | 65 +++++++++---------- 1 file changed, 31 insertions(+), 34 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index c0b85eb0eb..7dcefeff3f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -30,6 +30,36 @@ } ] }, + "RenderSettings": { + "apply_render_settings": true, + "default_render_image_folder": "", + "aov_separator": "underscore", + "arnold_renderer": { + "image_prefix": "", + "image_format": "exr", + "multilayer_exr": true, + "tiled": true, + "aov_list": [], + "additional_options": {} + }, + "vray_renderer": { + "image_prefix": "", + "engine": "1", + "image_format": "png", + "aov_list": [], + "additional_options": {} + }, + "redshift_renderer": { + "image_prefix": "", + "primary_gi_engine": "0", + "secondary_gi_engine": "0", + "image_format": "iff", + "multilayer_exr": true, + "force_combine": true, + "aov_list": [], + "additional_options": {} + } + }, "create": { "CreateLook": { "enabled": true, @@ -42,40 +72,7 @@ "enabled": true, "defaults": [ "Main" - ], - "default_render_image_folder": "renders", - "aov_separator": "underscore", - "arnold_renderer": { - "image_prefix": "maya///{aov_separator}", - "image_format": "exr", - "multilayer_exr": false, - "tiled": true, - "aov_list": [ - "empty" - ], - "additional_options": {} - }, - "vray_renderer": { - "image_prefix": "maya///", - "engine": "1", - "image_format": "exr", - "aov_list": [ - "empty" - ], - "additional_options": {} - }, - "redshift_renderer": { - "image_prefix": "'maya///{aov_separator}", - "primary_gi_engine": "0", - "secondary_gi_engine": "0", - "image_format": "exr", - "multilayer_exr": false, - "force_combine": false, - "aov_list": [ - "empty" - ], - "additional_options": {} - } + ] }, "CreateUnrealStaticMesh": { "enabled": true, From 18693cf96f3830a7376109f3015eb47894bb764c Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 12:00:26 +0300 Subject: [PATCH 120/785] fix function argument, add renderer --- openpype/hosts/maya/api/menu.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 133877a63e..4b79357f0b 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -45,6 +45,7 @@ def install(): parent="MayaWindow" ) + renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() # Create context menu context_label = "{}, {}".format( avalon.api.Session["AVALON_ASSET"], @@ -101,9 +102,7 @@ def install(): cmds.menuItem( "Set Render Settings", - command=lambda 
*args: lib_rendersettings.set_default_renderer_settings( # noqa - parent=parent_widget - ) + command=lambda *args: lib_rendersettings.RenderSettings.set_default_renderer_settings(renderer) # noqa ) cmds.menuItem(divider=True) From d3d27576ec01a261f01eb5e2abd05ca6925dff33 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 12:13:59 +0300 Subject: [PATCH 121/785] Fix Arnold function missing arguments. --- openpype/hosts/maya/api/lib_rendersettings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 5e0d40e6f9..cdd65de209 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -8,7 +8,7 @@ from avalon.api import Session from openpype.pipeline import CreatorError -class RenderSettings(object): +class RenderSettzings(object): _image_prefix_nodes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', @@ -84,7 +84,7 @@ class RenderSettings(object): if renderer == "arnold": # set renderer settings for Arnold from project settings - self._set_Arnold_settings() + self._set_Arnold_settings(width, height) if renderer == "vray": self._set_vray_settings(aov_separator, width, height) From 09a941acd0cbfa5270d7c8f0b3f3680ae6acc72d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 12:18:19 +0300 Subject: [PATCH 122/785] Fix Redshift function missing arguments. --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index cdd65de209..4362511fc4 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -90,7 +90,7 @@ class RenderSettzings(object): self._set_vray_settings(aov_separator, width, height) if renderer == "redshift": - self._set_redshift_settings() + self._set_redshift_settings(width, height) def _set_Arnold_settings(self, width, height): """Sets settings for Arnold.""" From 6299b01ae6532272a0f18e743aa358fe995c1c12 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 12:23:13 +0300 Subject: [PATCH 123/785] Fix accidental typo. --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 4362511fc4..33b138fa08 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -8,7 +8,7 @@ from avalon.api import Session from openpype.pipeline import CreatorError -class RenderSettzings(object): +class RenderSettings(object): _image_prefix_nodes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', From 1e251ac064a74f3e0b4b8b3fddaf5a42213c6f88 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Apr 2022 12:25:02 +0300 Subject: [PATCH 124/785] Remove trailing space. 
--- openpype/hosts/maya/api/menu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 4b79357f0b..c1aea4da78 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -45,7 +45,7 @@ def install(): parent="MayaWindow" ) - renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() + renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() # Create context menu context_label = "{}, {}".format( avalon.api.Session["AVALON_ASSET"], From 7a63e52a3fa520aa67c6fb9c21a11671fcea4317 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Apr 2022 00:12:29 +0300 Subject: [PATCH 125/785] Append Arnold render settings from project settings. --- openpype/hosts/maya/api/lib_rendersettings.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 33b138fa08..26e2455d86 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -81,10 +81,10 @@ class RenderSettings(object): asset = get_asset() width = asset["data"].get("resolutionWidth") height = asset["data"].get("resolutionHeight") - + arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] if renderer == "arnold": # set renderer settings for Arnold from project settings - self._set_Arnold_settings(width, height) + self._set_Arnold_settings(arnold_render_presets, width, height) if renderer == "vray": self._set_vray_settings(aov_separator, width, height) @@ -92,13 +92,12 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings(width, height) - def _set_Arnold_settings(self, width, height): + def _set_Arnold_settings(self, settings, width, height): """Sets settings for Arnold.""" - img_ext = self.arnold_renderer.get("image_format") + img_ext = settings["image_format"] self._set_global_output_settings() - cmds.setAttr("defaultArnoldDriver.ai_translator", - img_ext, type="string") + cmds.setAttr("defaultArnoldDriver.ai_translator", img_ext, type="string") cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) From 14a34836b710052ebc41744ec963ba8d062a03f3 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Apr 2022 02:26:38 +0300 Subject: [PATCH 126/785] Add Maya window function call to initalize render objects. 
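A note on the change below: in a fresh scene the Arnold option and driver nodes (defaultArnoldRenderOptions, defaultArnoldDriver) usually do not exist until the render globals have been initialized, so setting attributes on them fails. This commit works around that by building the unified Render Settings window once and closing it again; PATCH 128 later replaces the workaround with mtoa.core.createOptions(). A minimal sketch of the pattern, assuming MtoA is loaded:

    # Force Maya/MtoA to create the Arnold render nodes, then close the UI.
    from maya import cmds, mel

    mel.eval("unifiedRenderGlobalsWindow;")
    if cmds.window("unifiedRenderGlobalsWindow", exists=True):
        cmds.deleteUI("unifiedRenderGlobalsWindow")

    # Attributes such as "defaultArnoldDriver.ai_translator" can now be set
    # without hitting a missing-node error.
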
--- openpype/hosts/maya/api/lib_rendersettings.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 26e2455d86..1dcea16640 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -1,4 +1,4 @@ -from maya import cmds +from maya import cmds, mel from openpype.api import ( get_project_settings, @@ -94,12 +94,16 @@ class RenderSettings(object): def _set_Arnold_settings(self, settings, width, height): """Sets settings for Arnold.""" + mel.eval('unifiedRenderGlobalsWindow;') + if cmds.window("unifiedRenderGlobalsWindow", exists=True): + cmds.deleteUI("unifiedRenderGlobalsWindow") + + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) img_ext = settings["image_format"] self._set_global_output_settings() cmds.setAttr("defaultArnoldDriver.ai_translator", img_ext, type="string") - cmds.setAttr("defaultResolution.width", width) - cmds.setAttr("defaultResolution.height", height) def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" From 79d770054125c6d585cdd70cf9eb03d5f5b08f39 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Apr 2022 13:30:12 +0300 Subject: [PATCH 127/785] fix asset var name, add relevant comments --- openpype/hosts/maya/api/lib_rendersettings.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 1dcea16640..70ec1ebb47 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -78,10 +78,12 @@ class RenderSettings(object): prefix, type="string") - asset = get_asset() - width = asset["data"].get("resolutionWidth") - height = asset["data"].get("resolutionHeight") + asset_doc = get_asset() + # TODO: handle not having res values in the doc + width = asset_doc["data"].get("resolutionWidth") + height = asset_doc["data"].get("resolutionHeight")# TODO: don't camelcase arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] + if renderer == "arnold": # set renderer settings for Arnold from project settings self._set_Arnold_settings(arnold_render_presets, width, height) From fb424f672609b91cec693e2c13bdf224b9c4a998 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Apr 2022 15:12:15 +0300 Subject: [PATCH 128/785] replace render settings workaround with function call --- openpype/hosts/maya/api/lib_rendersettings.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 70ec1ebb47..13317cf85e 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -1,4 +1,5 @@ -from maya import cmds, mel +from maya import cmds +from mtoa.core import createOptions from openpype.api import ( get_project_settings, @@ -96,11 +97,7 @@ class RenderSettings(object): def _set_Arnold_settings(self, settings, width, height): """Sets settings for Arnold.""" - mel.eval('unifiedRenderGlobalsWindow;') - - if cmds.window("unifiedRenderGlobalsWindow", exists=True): - cmds.deleteUI("unifiedRenderGlobalsWindow") - + createOptions() cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) img_ext = settings["image_format"] From 
6645be26ef175de388b36096127e6696e1028947 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 2 May 2022 13:34:49 +0300 Subject: [PATCH 129/785] minor style/import fixes --- openpype/hosts/maya/api/lib_renderproducts.py | 1 + openpype/hosts/maya/plugins/create/create_render.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index f62432b2e9..1f38ef8904 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -81,6 +81,7 @@ IMAGE_PREFIXES = { RENDERMAN_IMAGE_DIR = "maya//" + def has_tokens(string, tokens): """Return whether any of tokens is in input string (case-insensitive)""" pattern = "({})".format("|".join(re.escape(token) for token in tokens)) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4e36f922d9..6b65911cf3 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -2,11 +2,9 @@ """Create ``Render`` instance in Maya.""" import json import os -import sys import appdirs import requests -import six from maya import cmds from maya.app.renderSetup.model import renderSetup @@ -21,7 +19,6 @@ from openpype.hosts.maya.api import ( lib_rendersettings, plugin ) -from openpype.modules import ModulesManager class CreateRender(plugin.Creator): From 2cdea369dc1849243fc926301ec4780a42774fee Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 2 May 2022 13:35:40 +0300 Subject: [PATCH 130/785] Remove avalon import. --- openpype/hosts/maya/plugins/create/create_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 6b65911cf3..2bbaf1006d 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -9,7 +9,6 @@ import requests from maya import cmds from maya.app.renderSetup.model import renderSetup -from avalon.api import Session from openpype.api import ( get_system_settings, get_project_settings, From e0b0e30734b8c036fbec4f65c8c2b678be59d053 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 2 May 2022 13:54:09 +0300 Subject: [PATCH 131/785] replace avalon dependency with legacy_io --- openpype/hosts/maya/api/lib_rendersettings.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 13317cf85e..4e1c4f7bd2 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -5,7 +5,7 @@ from openpype.api import ( get_project_settings, get_asset) -from avalon.api import Session +from openpype.pipeline import legacy_io from openpype.pipeline import CreatorError @@ -50,7 +50,7 @@ class RenderSettings(object): renderer = 'renderman' if project_settings is None: - project_settings = get_project_settings(Session["AVALON_PROJECT"]) + project_settings = get_project_settings(legacy_io.Session["AVALON_PROJECT"]) render_settings = RenderSettings(project_settings) render_settings.set_default_renderer_settings(renderer) @@ -97,6 +97,7 @@ class RenderSettings(object): def _set_Arnold_settings(self, settings, width, height): """Sets settings for Arnold.""" + createOptions() cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", 
height) From 302e493f617a9d027243a8ca661f99b482bbbb55 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 2 May 2022 13:54:39 +0300 Subject: [PATCH 132/785] Change import position. --- openpype/hosts/maya/api/lib_rendersettings.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 4e1c4f7bd2..5afcd94758 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -1,5 +1,4 @@ from maya import cmds -from mtoa.core import createOptions from openpype.api import ( get_project_settings, @@ -97,7 +96,7 @@ class RenderSettings(object): def _set_Arnold_settings(self, settings, width, height): """Sets settings for Arnold.""" - + from mtoa.core import createOptions createOptions() cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) From e9afd17b296677b1558b83469cdf68cd3cf4e555 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 May 2022 10:02:20 +0200 Subject: [PATCH 133/785] converted event to be able create multiple independent event systems --- openpype/lib/events.py | 76 +++++++++++++++++++++++++++++++++--------- 1 file changed, 61 insertions(+), 15 deletions(-) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 7bec6ee30d..3762cec9f9 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -11,6 +11,10 @@ except Exception: from openpype.lib.python_2_comp import WeakMethod +class MissingEventSystem(Exception): + pass + + class EventCallback(object): """Callback registered to a topic. @@ -179,13 +183,14 @@ class Event(object): """ _data = {} - def __init__(self, topic, data=None, source=None): + def __init__(self, topic, data=None, source=None, event_system=None): self._id = str(uuid4()) self._topic = topic if data is None: data = {} self._data = data self._source = source + self._event_system = event_system def __getitem__(self, key): return self._data[key] @@ -211,28 +216,69 @@ class Event(object): def emit(self): """Emit event and trigger callbacks.""" - StoredCallbacks.emit_event(self) + if self._event_system is None: + raise MissingEventSystem( + "Can't emit event {}. 
Does not have set event system.".format( + str(repr(self)) + ) + ) + self._event_system.emit_event(self) -class StoredCallbacks: - _registered_callbacks = [] +class EventSystem(object): + def __init__(self): + self._registered_callbacks = [] - @classmethod - def add_callback(cls, topic, callback): + def add_callback(self, topic, callback): callback = EventCallback(topic, callback) - cls._registered_callbacks.append(callback) + self._registered_callbacks.append(callback) return callback - @classmethod - def emit_event(cls, event): + def create_event(self, topic, data, source): + return Event(topic, data, source, self) + + def emit(self, topic, data, source): + event = self.create_event(topic, data, source) + event.emit() + return event + + def emit_event(self, event): invalid_callbacks = [] - for callback in cls._registered_callbacks: + for callback in self._registered_callbacks: callback.process_event(event) if not callback.is_ref_valid: invalid_callbacks.append(callback) for callback in invalid_callbacks: - cls._registered_callbacks.remove(callback) + self._registered_callbacks.remove(callback) + + +class GlobalEvent(Event): + def __init__(self, topic, data=None, source=None): + event_system = GlobalEventSystem.get_global_event_system() + + super(GlobalEvent, self).__init__(topic, data, source, event_system) + + +class GlobalEventSystem: + _global_event_system = None + + @classmethod + def get_global_event_system(cls): + if cls._global_event_system is None: + cls._global_event_system = EventSystem() + return cls._global_event_system + + @classmethod + def add_callback(cls, topic, callback): + event_system = cls.get_global_event_system() + return event_system.add_callback(topic, callback) + + @classmethod + def emit(cls, topic, data, source): + event = GlobalEvent(topic, data, source) + event.emit() + return event def register_event_callback(topic, callback): @@ -249,7 +295,8 @@ def register_event_callback(topic, callback): enable/disable listening to a topic or remove the callback from the topic completely. """ - return StoredCallbacks.add_callback(topic, callback) + + return GlobalEventSystem.add_callback(topic, callback) def emit_event(topic, data=None, source=None): @@ -263,6 +310,5 @@ def emit_event(topic, data=None, source=None): Returns: Event: Object of event that was emitted. 
""" - event = Event(topic, data, source) - event.emit() - return event + + return GlobalEventSystem.emit(topic, data, source) From 103cd8c18093bd4af8e2be23795107b6e87c910a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 6 May 2022 12:40:18 +0300 Subject: [PATCH 134/785] Move settings getter function --- openpype/hosts/maya/api/lib_rendersettings.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 5afcd94758..7b2145b7ac 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -81,8 +81,7 @@ class RenderSettings(object): asset_doc = get_asset() # TODO: handle not having res values in the doc width = asset_doc["data"].get("resolutionWidth") - height = asset_doc["data"].get("resolutionHeight")# TODO: don't camelcase - arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] + height = asset_doc["data"].get("resolutionHeight") if renderer == "arnold": # set renderer settings for Arnold from project settings @@ -94,13 +93,16 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings(width, height) - def _set_Arnold_settings(self, settings, width, height): + def _set_Arnold_settings(self, width, height): """Sets settings for Arnold.""" from mtoa.core import createOptions createOptions() + arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa + img_ext = arnold_render_presets["image_format"] + cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) - img_ext = settings["image_format"] + self._set_global_output_settings() cmds.setAttr("defaultArnoldDriver.ai_translator", img_ext, type="string") From 2bb96a90a49114080dc1e4bb7ad978c4b5ecaa05 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 6 May 2022 13:55:53 +0300 Subject: [PATCH 135/785] Append aov handling --- openpype/hosts/maya/api/lib_rendersettings.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 7b2145b7ac..582bdc224a 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -96,9 +96,14 @@ class RenderSettings(object): def _set_Arnold_settings(self, width, height): """Sets settings for Arnold.""" from mtoa.core import createOptions + from mtoa.aovs import AOVInterface createOptions() arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa img_ext = arnold_render_presets["image_format"] + aovs = arnold_render_presets["aov_list"] + + for aov in aovs: + AOVInterface('defaultArnoldRenderOptions'.addAOV(aov)) cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) From f6d442330de7585fb1ce76389249fb6c43b6d5c4 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 9 May 2022 12:48:25 +0300 Subject: [PATCH 136/785] Get renderer from within settings function --- openpype/hosts/maya/api/lib_rendersettings.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 582bdc224a..64e3d07a44 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -54,13 +54,15 @@ class RenderSettings(object): render_settings 
= RenderSettings(project_settings) render_settings.set_default_renderer_settings(renderer) - def set_default_renderer_settings(self, renderer): + @staticmethod + def set_default_renderer_settings(self): """Set basic settings based on renderer. Args: renderer (str): Renderer name. """ + renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() # project_settings/maya/create/CreateRender/aov_separator try: aov_separator = self._aov_chars[( From ad6f562f80f1c916eae3226bae282e4c55eedacf Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 9 May 2022 13:30:40 +0300 Subject: [PATCH 137/785] Remove unused parameter --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 64e3d07a44..13be2a1e26 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -87,7 +87,7 @@ class RenderSettings(object): if renderer == "arnold": # set renderer settings for Arnold from project settings - self._set_Arnold_settings(arnold_render_presets, width, height) + self._set_Arnold_settings(width, height) if renderer == "vray": self._set_vray_settings(aov_separator, width, height) From e4324a11f7f11a5b76c48388b53b25c3d63d0efe Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 9 May 2022 13:34:11 +0300 Subject: [PATCH 138/785] Move get_asset() --- openpype/hosts/maya/api/lib_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 13be2a1e26..73f03975bb 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -63,6 +63,7 @@ class RenderSettings(object): """ renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() + asset_doc = get_asset() # project_settings/maya/create/CreateRender/aov_separator try: aov_separator = self._aov_chars[( @@ -80,7 +81,7 @@ class RenderSettings(object): prefix, type="string") - asset_doc = get_asset() + # TODO: handle not having res values in the doc width = asset_doc["data"].get("resolutionWidth") height = asset_doc["data"].get("resolutionHeight") From bf1daa4e906ef36afcbaf20b21578106b7e24f41 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 9 May 2022 13:34:28 +0300 Subject: [PATCH 139/785] style fix --- openpype/hosts/maya/api/lib_rendersettings.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 73f03975bb..3ac663e38d 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -81,7 +81,6 @@ class RenderSettings(object): prefix, type="string") - # TODO: handle not having res values in the doc width = asset_doc["data"].get("resolutionWidth") height = asset_doc["data"].get("resolutionHeight") From bb67065d39a092383ce1a277cb68f23d2302c225 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 10 May 2022 12:14:15 +0200 Subject: [PATCH 140/785] few style changes --- openpype/hosts/maya/api/lib_rendersettings.py | 61 +++++++++++-------- openpype/hosts/maya/api/menu.py | 2 +- 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 3ac663e38d..03c70ee3d6 100644 --- 
a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -1,4 +1,8 @@ -from maya import cmds +# -*- coding: utf-8 -*- +"""Class for handling Render Settings.""" +from maya import cmds # noqa +import six +import sys from openpype.api import ( get_project_settings, @@ -36,8 +40,12 @@ class RenderSettings(object): def get_image_prefix_attr(cls, renderer): return cls._image_prefix_nodes[renderer] - def __init__(self, project_settings): + def __init__(self, project_settings=None): self._project_settings = project_settings + if not self._project_settings: + self._project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) @staticmethod def apply_defaults(renderer=None, project_settings=None): @@ -48,21 +56,15 @@ class RenderSettings(object): if renderer.startswith('renderman'): renderer = 'renderman' - if project_settings is None: - project_settings = get_project_settings(legacy_io.Session["AVALON_PROJECT"]) - render_settings = RenderSettings(project_settings) render_settings.set_default_renderer_settings(renderer) - @staticmethod - def set_default_renderer_settings(self): - """Set basic settings based on renderer. + def set_default_renderer_settings(self, renderer=None): + """Set basic settings based on renderer.""" + if not renderer: + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() - Args: - renderer (str): Renderer name. - - """ - renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() asset_doc = get_asset() # project_settings/maya/create/CreateRender/aov_separator try: @@ -87,7 +89,7 @@ class RenderSettings(object): if renderer == "arnold": # set renderer settings for Arnold from project settings - self._set_Arnold_settings(width, height) + self._set_arnold_settings(width, height) if renderer == "vray": self._set_vray_settings(aov_separator, width, height) @@ -95,28 +97,34 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings(width, height) - def _set_Arnold_settings(self, width, height): + def _set_arnold_settings(self, width, height): """Sets settings for Arnold.""" - from mtoa.core import createOptions - from mtoa.aovs import AOVInterface + from mtoa.core import createOptions # noqa + from mtoa.aovs import AOVInterface # noqa createOptions() arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa img_ext = arnold_render_presets["image_format"] aovs = arnold_render_presets["aov_list"] for aov in aovs: - AOVInterface('defaultArnoldRenderOptions'.addAOV(aov)) + AOVInterface('defaultArnoldRenderOptions').addAOV(aov) cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) self._set_global_output_settings() - cmds.setAttr("defaultArnoldDriver.ai_translator", img_ext, type="string") + cmds.setAttr( + "defaultArnoldDriver.ai_translator", img_ext, type="string") def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" - - img_ext = self.redshift_renderer.get("image_format") + redshift_render_presets = ( + self._project_settings + ["maya"] + ["RenderSettings"] + ["redshift_renderer"] + ) + img_ext = redshift_render_presets.get("image_format") self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) cmds.setAttr("defaultResolution.width", width) @@ -138,10 +146,13 @@ class RenderSettings(object): separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 try: sep_idx = 
separators.index(aov_separator) - except ValueError: - raise CreatorError( - "AOV character {} not in {}".format( - aov_separator, separators)) + except ValueError as e: + six.reraise( + CreatorError, + CreatorError( + "AOV character {} not in {}".format( + aov_separator, separators)), + sys.exc_info()[2]) cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 3c43c192e3..c3ce8b0227 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -101,7 +101,7 @@ def install(): cmds.menuItem( "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings.set_default_renderer_settings(renderer) # noqa + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa ) cmds.menuItem(divider=True) From ae0708b639a1981fbefc5331a2b24af50079470a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 11 May 2022 12:28:45 +0300 Subject: [PATCH 141/785] Force resetting render settings --- openpype/hosts/maya/api/lib_rendersettings.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 03c70ee3d6..3946750add 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """Class for handling Render Settings.""" from maya import cmds # noqa +import maya.mel as mel import six import sys @@ -103,9 +104,17 @@ class RenderSettings(object): from mtoa.aovs import AOVInterface # noqa createOptions() arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa + # Force resetting settings and AOV list to avoid having to deal with + # AOV checking logic, for now. + # This is a work around because the standard + # function to revert render settings does not reset AOVs list in MtoA + # Fetch current aovs in case there's any. + current_aovs = AOVInterface().getAOVs() + # Remove fetched AOVs + AOVInterface().removeAOVs(current_aovs) + mel.eval("unifiedRenderGlobalsRevertToDefault") img_ext = arnold_render_presets["image_format"] aovs = arnold_render_presets["aov_list"] - for aov in aovs: AOVInterface('defaultArnoldRenderOptions').addAOV(aov) From 6809d372b8b619a676beecce153f3cd14c273279 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 12 May 2022 00:15:18 +0300 Subject: [PATCH 142/785] Propagate further attributes. 
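Illustrative sketch only: assuming the propagated presets arrive as a flat
mapping of Maya attribute names to values, applying them boils down to a
typed setAttr per entry. The attribute names below are the ones this change
touches; the values are placeholders, not project defaults (the real lookup
is project_settings["maya"]["RenderSettings"]["arnold_renderer"], as in the
diff below).

    from maya import cmds

    # placeholder values - the real ones come from the project settings
    arnold_presets = {
        "defaultRenderGlobals.imageFilePrefix": "<Scene>/<RenderLayer>/<RenderLayer>",
        "defaultArnoldDriver.ai_translator": "exr",
        "defaultArnoldDriver.exrTiled": 0,
        "defaultArnoldDriver.mergeAOVs": 1,
    }

    for attr, value in arnold_presets.items():
        if isinstance(value, str):
            # string attributes need the explicit type flag
            cmds.setAttr(attr, value, type="string")
        else:
            cmds.setAttr(attr, value)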
--- openpype/hosts/maya/api/lib_rendersettings.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 3946750add..18e5e132d0 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -114,7 +114,10 @@ class RenderSettings(object): AOVInterface().removeAOVs(current_aovs) mel.eval("unifiedRenderGlobalsRevertToDefault") img_ext = arnold_render_presets["image_format"] + img_prefix = arnold_render_presets["image_prefix"] aovs = arnold_render_presets["aov_list"] + img_tiled = arnold_render_presets["tiled"] + multi_exr = arnold_render_presets["multilayer_exr"] for aov in aovs: AOVInterface('defaultArnoldRenderOptions').addAOV(aov) @@ -122,9 +125,22 @@ class RenderSettings(object): cmds.setAttr("defaultResolution.height", height) self._set_global_output_settings() + + cmds.setAttr( + "defaultRenderGlobals.imageFilePrefix", img_prefix, type="string") + cmds.setAttr( "defaultArnoldDriver.ai_translator", img_ext, type="string") + cmds.setAttr( + "defaultArnoldDriver.exrTiled", img_tiled, type="boolean") + + cmds.setAttr( + "defaultArnoldDriver.mergeAOVs", multi_exr, type="boolean") + + for attr in additional_options.items(): + cmds.setAttr(attr, additional_options.get(attr, None)) + def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" redshift_render_presets = ( From b1d49692e0f4e3e00c71ed23d818885042d4d0b2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 12 May 2022 00:39:23 +0300 Subject: [PATCH 143/785] Add variable for additional attributes --- openpype/hosts/maya/api/lib_rendersettings.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 18e5e132d0..3d229060be 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -118,6 +118,7 @@ class RenderSettings(object): aovs = arnold_render_presets["aov_list"] img_tiled = arnold_render_presets["tiled"] multi_exr = arnold_render_presets["multilayer_exr"] + additional_options = arnold_render_presets["additional_options"] for aov in aovs: AOVInterface('defaultArnoldRenderOptions').addAOV(aov) @@ -133,10 +134,10 @@ class RenderSettings(object): "defaultArnoldDriver.ai_translator", img_ext, type="string") cmds.setAttr( - "defaultArnoldDriver.exrTiled", img_tiled, type="boolean") + "defaultArnoldDriver.exrTiled", img_tiled) cmds.setAttr( - "defaultArnoldDriver.mergeAOVs", multi_exr, type="boolean") + "defaultArnoldDriver.mergeAOVs", multi_exr) for attr in additional_options.items(): cmds.setAttr(attr, additional_options.get(attr, None)) From e9426df72d37f65e053d6959ce91d2411e86e8a5 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 12 May 2022 11:16:07 +0300 Subject: [PATCH 144/785] Fix dictionary bug. 
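The broken loop iterated additional_options.items(), which hands
cmds.setAttr a (name, value) tuple instead of an attribute name string.
Minimal sketch of the corrected lookup, using a single made-up entry (the
real keys come from the project settings):

    from maya import cmds

    # hypothetical entry - real ones come from the "additional_options"
    # setting of the Arnold renderer section
    additional_options = {"defaultArnoldDriver.mergeAOVs": 1}

    for attr in additional_options.keys():
        # attr is now the attribute name, which is what setAttr expects
        cmds.setAttr(attr, additional_options.get(attr))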
--- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 3d229060be..49d7d9fc72 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -139,7 +139,7 @@ class RenderSettings(object): cmds.setAttr( "defaultArnoldDriver.mergeAOVs", multi_exr) - for attr in additional_options.items(): + for attr in additional_options.keys(): cmds.setAttr(attr, additional_options.get(attr, None)) def _set_redshift_settings(self, width, height): From 4260f8a49ce3d63a0fc18741c6319f65e49122be Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 12 May 2022 11:29:36 +0300 Subject: [PATCH 145/785] Attr as list to workaround ftrack limitation --- openpype/hosts/maya/api/lib_rendersettings.py | 9 ++++++--- .../schemas/schema_maya_render_settings.json | 1 + 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 49d7d9fc72..c6afbfa19c 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -138,9 +138,12 @@ class RenderSettings(object): cmds.setAttr( "defaultArnoldDriver.mergeAOVs", multi_exr) - - for attr in additional_options.keys(): - cmds.setAttr(attr, additional_options.get(attr, None)) + # Passes additional options in from the schema as a list + # but converts it to a dictionary because ftrack doesn't + # allow fullstops in custom attributes. + additional_options_dict = dict(additional_options) + for attr in additional_options_dict.keys(): + cmds.setAttr(attr, additional_options_dict.get(attr, None)) def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 8a5730fbef..96b67dc66a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -134,6 +134,7 @@ }, { "type": "dict-modifiable", + "store_as_list": true, "key": "additional_options", "label": "Additional Renderer Options", "use_label_wrap": true, From 12a1e9e520641e1e3e700d77576c5d0d036f5879 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 13 May 2022 10:53:11 +0300 Subject: [PATCH 146/785] Handle additional attributes for MtoA --- openpype/hosts/maya/api/lib_rendersettings.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index c6afbfa19c..38f493a4a8 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -140,10 +140,17 @@ class RenderSettings(object): "defaultArnoldDriver.mergeAOVs", multi_exr) # Passes additional options in from the schema as a list # but converts it to a dictionary because ftrack doesn't - # allow fullstops in custom attributes. - additional_options_dict = dict(additional_options) - for attr in additional_options_dict.keys(): - cmds.setAttr(attr, additional_options_dict.get(attr, None)) + # allow fullstops in custom attributes. 
Then checks for + # type of MtoA attribute passed to adjust the `setAttr` + # command accordingly. + for item in additional_options: + attribute, value = item + if (cmds.setAttr(str(attribute), type=True)) == "long": + cmds.setAttr(str(attribute), int(value)) + elif (cmds.setAttr(str(attribute), type=True)) == "bool": + cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa + elif (cmds.setAttr(str(attribute), type=True)) == "string": + cmds.setAttr(str(attribute), str(value), type = "string") # noqa def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" From dc95b5ac0e06de4e8916ad5cd5e63ca30901e6c5 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 13 May 2022 12:20:27 +0300 Subject: [PATCH 147/785] Import missing library --- openpype/hosts/maya/plugins/create/create_render.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 2bbaf1006d..334400bb23 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -19,6 +19,8 @@ from openpype.hosts.maya.api import ( plugin ) +from openpype.pipeline import legacy_io + class CreateRender(plugin.Creator): """Create *render* instance. From 201aa692bf9ac53d065522c51b1d780f3eec175d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 13 May 2022 14:00:36 +0300 Subject: [PATCH 148/785] Fix missing deadline import/logic --- openpype/hosts/maya/plugins/create/create_render.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 334400bb23..e858534912 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -18,7 +18,7 @@ from openpype.hosts.maya.api import ( lib_rendersettings, plugin ) - +from openpype.modules import ModulesManager from openpype.pipeline import legacy_io @@ -79,6 +79,8 @@ class CreateRender(plugin.Creator): self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) + manager = ModulesManager() + self.deadline_module = manager.modules_by_name["deadline"] try: default_servers = deadline_settings["deadline_urls"] project_servers = ( @@ -234,7 +236,8 @@ class CreateRender(plugin.Creator): deadline_url = next(iter(self.deadline_servers.values())) # Uses function to get pool machines from the assigned deadline # url in settings - pool_names = self._get_deadline_pools(deadline_url) + pool_names = self.deadline_module.get_deadline_pools(deadline_url, + self.log) maya_submit_dl = self._project_settings.get( "deadline", {}).get( "publish", {}).get( From a06bfc1648d242c6c8167b28deb969174380b987 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 13 May 2022 14:02:26 +0300 Subject: [PATCH 149/785] Style fix --- openpype/hosts/maya/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index e858534912..c4a8e53a0b 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -237,7 +237,7 @@ class CreateRender(plugin.Creator): # Uses function to get pool machines from the assigned deadline # url in settings pool_names = self.deadline_module.get_deadline_pools(deadline_url, - self.log) + self.log) 
maya_submit_dl = self._project_settings.get( "deadline", {}).get( "publish", {}).get( From dcac9e08a6aae510f024389870a212d0aae1596e Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 7 Jun 2022 15:53:08 +0200 Subject: [PATCH 150/785] :recycle: few tweaks --- tools/create_env.ps1 | 4 +++- tools/fetch_thirdparty_libs.ps1 | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 9472c75c2f..b1337b5635 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -169,14 +169,16 @@ if (-not (Test-Path -PathType Leaf -Path "$($openpype_root)\poetry.lock")) { } else { Write-Color -Text ">>> ", "Installing virtual environment from lock." -Color Green, Gray } +$startTime = [int][double]::Parse((Get-Date -UFormat %s)) & "$env:POETRY_HOME\bin\poetry" install --no-root $poetry_verbosity --ansi if ($LASTEXITCODE -ne 0) { Write-Color -Text "!!! ", "Poetry command failed." -Color Red, Yellow Set-Location -Path $current_dir Exit-WithCode 1 } +$endTime = [int][double]::Parse((Get-Date -UFormat %s)) Set-Location -Path $current_dir -New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Virtual environment created." +New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Virtual environment created.", "All done in $($endTime - $startTime) secs." Write-Color -Text ">>> ", "Virtual environment created." -Color Green, White diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 index 0226a35bfb..41a3585ff9 100644 --- a/tools/fetch_thirdparty_libs.ps1 +++ b/tools/fetch_thirdparty_libs.ps1 @@ -34,6 +34,8 @@ if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { } else { Write-Color -Text "OK" -Color Green } - +$startTime = [int][double]::Parse((Get-Date -UFormat %s)) & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" +$endTime = [int][double]::Parse((Get-Date -UFormat %s)) Set-Location -Path $current_dir +New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Dependencies downloaded", "All done in $($endTime - $startTime) secs." From 3780b37b999f3a830fd74e9ca44f5d1d4adf4c1b Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 20 Jun 2022 10:32:25 +0300 Subject: [PATCH 151/785] Remove avalon-core. --- repos/avalon-core | 1 - 1 file changed, 1 deletion(-) delete mode 160000 repos/avalon-core diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From 2a78532eadc976274ee49b09cb568a06ed44ea60 Mon Sep 17 00:00:00 2001 From: "Allan I. 
A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 20 Jun 2022 10:33:59 +0300 Subject: [PATCH 152/785] Update openpype/hosts/maya/plugins/publish/validate_render_single_camera.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../maya/plugins/publish/validate_render_single_camera.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 3f08e0cd62..1ca2ad42af 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -39,7 +39,9 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): if renderer.startswith('renderman'): renderer = 'renderman' - attr = RenderSettings.get_image_prefix_attr(renderer) + file_prefix = cmds.getAttr( + RenderSettings.get_image_prefix_attr(renderer) + ) file_prefix = cmds.getAttr(attr) if len(cameras) > 1: From 7e1015004c77d01a7c77c87e00be637c7c6d01c5 Mon Sep 17 00:00:00 2001 From: macman Date: Mon, 20 Jun 2022 11:29:19 +0300 Subject: [PATCH 153/785] Remove unnecessary var statement. --- .../hosts/maya/plugins/publish/validate_render_single_camera.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 1ca2ad42af..35b87fd0ab 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -42,7 +42,7 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): file_prefix = cmds.getAttr( RenderSettings.get_image_prefix_attr(renderer) ) - file_prefix = cmds.getAttr(attr) + if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): From 569d7c98fc7eaed14bb20ea86c5ea78c1d2941d2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 16:06:53 +0300 Subject: [PATCH 154/785] Add hardware fog keys to Schema --- .../schemas/schema_maya_capture.json | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index d6b81c8687..a5e1cb45d9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -224,6 +224,42 @@ "key": "twoSidedLighting", "label": "Two Sided Lighting" }, + { + "type": "splitter" + }, + { + "type": "boolean", + "key": "hwFogEnable", + "label": "Enable Hardware Fog" + }, + { + "type": "number", + "key": "hwFogStart", + "label": "Hardware Fog Start" + }, + { + "type": "number", + "key": "hwFogEnd", + "label": "Hardware Fog End" + }, + { + "type": "number", + "key": "hwFogAlpha", + "label": "Hardware Fog Alpha" + }, + { + "type": "number", + "key": "hwFogFalloff", + "label": "Hardware Fog Falloff" + }, + { + "type": "number", + "key": "hwFogDensity", + "label": "Hardware Fog Density" + }, + { + "type": "splitter" + }, { "type": "boolean", "key": "ssaoEnable", From cb6e093ee33be1aa9af97dca37e07cad7387b4af Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 16:25:11 +0300 
Subject: [PATCH 155/785] Add SSAO options to schema --- .../schemas/schema_maya_capture.json | 42 ++++++++++++++++++- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index a5e1cb45d9..919c847a9b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -227,6 +227,34 @@ { "type": "splitter" }, + { + "type": "boolean", + "key": "ssaoEnable", + "label": "Screen Space Ambient Occlusion" + }, + { + "type": "number", + "key": "ssaoAmount", + "label": "SSAO Amount" + }, + { + "type": "number", + "key": "ssaoFilterRadius", + "label": "SSAO Filter Radius" + }, + { + "type": "number", + "key": "ssaoRadius", + "label": "SSAO Radius" + }, + { + "type": "number", + "key": "ssaoSamples", + "label": "SSAO Samples" + }, + { + "type": "splitter" + }, { "type": "boolean", "key": "hwFogEnable", @@ -262,8 +290,18 @@ }, { "type": "boolean", - "key": "ssaoEnable", - "label": "Screen Space Ambient Occlusion" + "key": "motionBlurEnable", + "label": "Enable Motion Blur" + }, + { + "type": "number", + "key": "motionBlurShutterOpenFraction", + "label": "Shutter Open Fraction" + }, + { + "type": "number", + "key": "hwFogFalloff", + "label": "Hardware Fog Falloff" }, { "type": "splitter" From 697916c3cdba015b68f0376902422606ac27f9fe Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 17:03:10 +0300 Subject: [PATCH 156/785] Append enum for fog falloff. --- .../schemas/schema_maya_capture.json | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 919c847a9b..caf001f7e1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -260,6 +260,16 @@ "key": "hwFogEnable", "label": "Enable Hardware Fog" }, + { + "type": "enum", + "key": "hwFogFalloff", + "label": "Hardware Fog Falloff", + "enum_items": [ + { "0": "Linear"}, + { "1": "Exponential"}, + { "2": "Exponential Squared"} + ] + }, { "type": "number", "key": "hwFogStart", @@ -275,11 +285,6 @@ "key": "hwFogAlpha", "label": "Hardware Fog Alpha" }, - { - "type": "number", - "key": "hwFogFalloff", - "label": "Hardware Fog Falloff" - }, { "type": "number", "key": "hwFogDensity", @@ -298,11 +303,6 @@ "key": "motionBlurShutterOpenFraction", "label": "Shutter Open Fraction" }, - { - "type": "number", - "key": "hwFogFalloff", - "label": "Hardware Fog Falloff" - }, { "type": "splitter" }, From ed0ba7e3ab306f1674def5f7cc69666cb96e8dc6 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 17:39:46 +0300 Subject: [PATCH 157/785] Append Antialiasing and MotionBlur Sampling --- .../schemas/schema_maya_capture.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index caf001f7e1..fa5be19cda 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -201,6 +201,14 @@ "label": "Texture Clamp Resolution", "decimal": 0 }, + { + "type": "splitter" + }, + { + "type": "boolean", + "key": "lineAAEnable", + "label": "Smooth Wireframe" + }, { "type": "number", "key": "multiSample", @@ -209,6 +217,9 @@ "minimum": 0, "maximum": 32 }, + { + "type": "splitter" + }, { "type": "boolean", "key": "shadows", @@ -303,6 +314,11 @@ "key": "motionBlurShutterOpenFraction", "label": "Shutter Open Fraction" }, + { + "type": "number", + "key": "motionBlurSampleCount", + "label": "Sample Count" + }, { "type": "splitter" }, From de6bd72c2ed1ef40b3cecfa4a7caed9cf93937ea Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 18:01:49 +0300 Subject: [PATCH 158/785] Appen defaults. --- .../settings/defaults/project_settings/maya.json | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 69fbf5bfdf..6780921001 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -491,11 +491,25 @@ "override_viewport_options": true, "displayLights": "default", "textureMaxResolution": 1024, + "lineAAEnable": true, "multiSample": 4, "shadows": true, "textures": true, "twoSidedLighting": true, "ssaoEnable": true, + "ssaoAmount": 0, + "ssaoFilterRadius": 0, + "ssaoRadius": 0, + "ssaoSamples": 0, + "hwFogEnable": true, + "hwFogFalloff": "0", + "hwFogStart": 0, + "hwFogEnd": 0, + "hwFogAlpha": 0, + "hwFogDensity": 0, + "motionBlurEnable": true, + "motionBlurShutterOpenFraction": 0, + "motionBlurSampleCount": 0, "cameras": false, "clipGhosts": false, "controlVertices": false, From 269c752bfc9ff109b8a75e57b3f78349d7bf1b75 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 21 Jun 2022 18:01:49 +0300 Subject: [PATCH 159/785] Append settings defaults. 
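These defaults mirror Viewport 2.0 attributes on hardwareRenderingGlobals,
which is where capture applies its Viewport2Options. Rough sketch of that
mapping over a trimmed, placeholder subset of the keys (the authoritative
values are the maya.json entries in the diff below):

    from maya import cmds

    # trimmed subset; values are placeholders, not the shipped defaults
    viewport2_defaults = {
        "ssaoEnable": True,
        "hwFogFalloff": 0,
        "motionBlurEnable": True,
    }

    for key, value in viewport2_defaults.items():
        attr = "hardwareRenderingGlobals.{0}".format(key)
        try:
            cmds.setAttr(attr, value)
        except (RuntimeError, ValueError):
            # some capture keys are plain display options rather than
            # hardwareRenderingGlobals attributes - skip those here
            continue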
--- .../settings/defaults/project_settings/maya.json | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 69fbf5bfdf..6780921001 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -491,11 +491,25 @@ "override_viewport_options": true, "displayLights": "default", "textureMaxResolution": 1024, + "lineAAEnable": true, "multiSample": 4, "shadows": true, "textures": true, "twoSidedLighting": true, "ssaoEnable": true, + "ssaoAmount": 0, + "ssaoFilterRadius": 0, + "ssaoRadius": 0, + "ssaoSamples": 0, + "hwFogEnable": true, + "hwFogFalloff": "0", + "hwFogStart": 0, + "hwFogEnd": 0, + "hwFogAlpha": 0, + "hwFogDensity": 0, + "motionBlurEnable": true, + "motionBlurShutterOpenFraction": 0, + "motionBlurSampleCount": 0, "cameras": false, "clipGhosts": false, "controlVertices": false, From 759cea424c30b06960fc7a247055b96c58ab0eea Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 22 Jun 2022 10:45:14 +0300 Subject: [PATCH 160/785] Start separating "lineAAEnable" --- openpype/vendor/python/common/capture.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 6b4c40a6e8..ae3a0d5cc1 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -361,7 +361,6 @@ Viewport2Options = { "floatingPointRTFormat": 1, "gammaCorrectionEnable": False, "gammaValue": 2.2, - "lineAAEnable": False, "maxHardwareLights": 8, "motionBlurEnable": False, "motionBlurSampleCount": 8, @@ -383,6 +382,10 @@ Viewport2Options = { "vertexAnimationCache": 0 } +Viewport2OAAoption = { + "lineAAenable": False, +} + def apply_view(panel, **options): """Apply options to panel""" @@ -496,6 +499,13 @@ def parse_view(panel): except ValueError: continue + for key in Viewport2OAAoption.keys(): + attr = "hardwareRenderingGlobals.{0}".format(key) + try: + viewport2_options[key] = cmds.getAttr(attr) + except ValueError: + continue + return { "camera": camera, "display_options": display_options, From 277682c03dcdbed093905188def33545f8a9b24a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 22 Jun 2022 10:56:49 +0300 Subject: [PATCH 161/785] Revert "Start separating "lineAAEnable"" This reverts commit 759cea424c30b06960fc7a247055b96c58ab0eea. 
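Keeping lineAAEnable inside the single Viewport2Options dict means the
existing getAttr loop in capture.py keeps picking it up along with every
other key, so no second pass is needed. Roughly (a sketch over a trimmed
key list, mirroring the loop shown in the diff below):

    from maya import cmds

    # trimmed key list; capture.py iterates the full Viewport2Options dict
    viewport2_keys = ["lineAAEnable", "motionBlurEnable", "motionBlurSampleCount"]

    viewport2_options = {}
    for key in viewport2_keys:
        attr = "hardwareRenderingGlobals.{0}".format(key)
        try:
            viewport2_options[key] = cmds.getAttr(attr)
        except ValueError:
            # attribute not available in this Maya version - skip it
            continue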
--- openpype/vendor/python/common/capture.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index ae3a0d5cc1..6b4c40a6e8 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -361,6 +361,7 @@ Viewport2Options = { "floatingPointRTFormat": 1, "gammaCorrectionEnable": False, "gammaValue": 2.2, + "lineAAEnable": False, "maxHardwareLights": 8, "motionBlurEnable": False, "motionBlurSampleCount": 8, @@ -382,10 +383,6 @@ Viewport2Options = { "vertexAnimationCache": 0 } -Viewport2OAAoption = { - "lineAAenable": False, -} - def apply_view(panel, **options): """Apply options to panel""" @@ -499,13 +496,6 @@ def parse_view(panel): except ValueError: continue - for key in Viewport2OAAoption.keys(): - attr = "hardwareRenderingGlobals.{0}".format(key) - try: - viewport2_options[key] = cmds.getAttr(attr) - except ValueError: - continue - return { "camera": camera, "display_options": display_options, From f43042eb169ad9214ad01511a3c4dee5fe469083 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 22 Jun 2022 15:12:40 +0300 Subject: [PATCH 162/785] Change hwFogEnable into fogging flag. --- .../projects_schema/schemas/schema_maya_capture.json | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index fa5be19cda..217aa947fc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -268,7 +268,7 @@ }, { "type": "boolean", - "key": "hwFogEnable", + "key": "fogging", "label": "Enable Hardware Fog" }, { @@ -304,21 +304,11 @@ { "type": "splitter" }, - { - "type": "boolean", - "key": "motionBlurEnable", - "label": "Enable Motion Blur" - }, { "type": "number", "key": "motionBlurShutterOpenFraction", "label": "Shutter Open Fraction" }, - { - "type": "number", - "key": "motionBlurSampleCount", - "label": "Sample Count" - }, { "type": "splitter" }, From 38522154e78c22cd0037dbbe91c2ea1f820b057c Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 22 Jun 2022 16:45:51 +0300 Subject: [PATCH 163/785] Remove flags --- .../defaults/project_settings/maya.json | 9 +------ .../schemas/schema_maya_capture.json | 25 ------------------- 2 files changed, 1 insertion(+), 33 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 0f40651c35..f9201286ab 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -496,25 +496,18 @@ "override_viewport_options": true, "displayLights": "default", "textureMaxResolution": 1024, - "lineAAEnable": true, "multiSample": 4, "shadows": true, "textures": true, "twoSidedLighting": true, "ssaoEnable": true, - "ssaoAmount": 0, "ssaoFilterRadius": 0, - "ssaoRadius": 0, "ssaoSamples": 0, - "hwFogEnable": true, + "fogging": true, "hwFogFalloff": "0", "hwFogStart": 0, "hwFogEnd": 0, "hwFogAlpha": 0, - "hwFogDensity": 0, - "motionBlurEnable": true, - "motionBlurShutterOpenFraction": 0, - "motionBlurSampleCount": 0, "cameras": false, "clipGhosts": false, "controlVertices": false, diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 217aa947fc..c7842e5031 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -204,11 +204,6 @@ { "type": "splitter" }, - { - "type": "boolean", - "key": "lineAAEnable", - "label": "Smooth Wireframe" - }, { "type": "number", "key": "multiSample", @@ -243,21 +238,11 @@ "key": "ssaoEnable", "label": "Screen Space Ambient Occlusion" }, - { - "type": "number", - "key": "ssaoAmount", - "label": "SSAO Amount" - }, { "type": "number", "key": "ssaoFilterRadius", "label": "SSAO Filter Radius" }, - { - "type": "number", - "key": "ssaoRadius", - "label": "SSAO Radius" - }, { "type": "number", "key": "ssaoSamples", @@ -296,19 +281,9 @@ "key": "hwFogAlpha", "label": "Hardware Fog Alpha" }, - { - "type": "number", - "key": "hwFogDensity", - "label": "Hardware Fog Density" - }, { "type": "splitter" }, - { - "type": "number", - "key": "motionBlurShutterOpenFraction", - "label": "Shutter Open Fraction" - }, { "type": "splitter" }, From 137ef3e22bbe9495e69a4379c3f85f367f3b4a7d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 22 Jun 2022 17:35:51 +0300 Subject: [PATCH 164/785] Remove keys that fail. --- .../defaults/project_settings/maya.json | 6 --- .../schemas/schema_maya_capture.json | 38 ------------------- 2 files changed, 44 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index f9201286ab..8494989556 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -501,13 +501,7 @@ "textures": true, "twoSidedLighting": true, "ssaoEnable": true, - "ssaoFilterRadius": 0, - "ssaoSamples": 0, "fogging": true, - "hwFogFalloff": "0", - "hwFogStart": 0, - "hwFogEnd": 0, - "hwFogAlpha": 0, "cameras": false, "clipGhosts": false, "controlVertices": false, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index c7842e5031..ace9fc22da 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -238,16 +238,6 @@ "key": "ssaoEnable", "label": "Screen Space Ambient Occlusion" }, - { - "type": "number", - "key": "ssaoFilterRadius", - "label": "SSAO Filter Radius" - }, - { - "type": "number", - "key": "ssaoSamples", - "label": "SSAO Samples" - }, { "type": "splitter" }, @@ -256,34 +246,6 @@ "key": "fogging", "label": "Enable Hardware Fog" }, - { - "type": "enum", - "key": "hwFogFalloff", - "label": "Hardware Fog Falloff", - "enum_items": [ - { "0": "Linear"}, - { "1": "Exponential"}, - { "2": "Exponential Squared"} - ] - }, - { - "type": "number", - "key": "hwFogStart", - "label": "Hardware Fog Start" - }, - { - "type": "number", - "key": "hwFogEnd", - "label": "Hardware Fog End" - }, - { - "type": "number", - "key": "hwFogAlpha", - "label": "Hardware Fog Alpha" - }, - { - "type": "splitter" - }, { "type": "splitter" }, From fe77fe64adfcd2bf2c9eb77d6c5181c0fb20dd5f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 22 Jun 2022 17:10:36 +0200 Subject: [PATCH 165/785] add new studios to main page 
--- website/src/pages/index.js | 27 ++++++++++++++++++- website/static/img/Logo_On_White-HR.png | Bin 0 -> 77588 bytes website/static/img/NoGhost_Logo_black.svg | 31 ++++++++++++++++++++++ website/static/img/agora_studio.png | Bin 0 -> 133985 bytes website/static/img/igg-logo.png | Bin 80331 -> 96336 bytes website/static/img/methodmadness.png | Bin 0 -> 8650 bytes website/static/img/noghost.png | Bin 0 -> 22435 bytes website/static/img/staticvfx.png | Bin 0 -> 12912 bytes 8 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 website/static/img/Logo_On_White-HR.png create mode 100644 website/static/img/NoGhost_Logo_black.svg create mode 100644 website/static/img/agora_studio.png create mode 100644 website/static/img/methodmadness.png create mode 100644 website/static/img/noghost.png create mode 100644 website/static/img/staticvfx.png diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 0886706015..ae7119e928 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -153,7 +153,32 @@ const studios = [ title: "IGG Canada", image: "/img/igg-logo.png", infoLink: "https://www.igg.com/", - } + }, + { + title: "Agora Studio", + image: "/img/agora_studio.png", + infoLink: "https://agora.studio/", + }, + { + title: "Lucan Visuals", + image: "/img/lucan_Logo_On_White-HR.png", + infoLink: "https://www.lucan.tv/", + }, + { + title: "No Ghost", + image: "/img/noghost.png", + infoLink: "https://www.noghost.co.uk/", + }, + { + title: "Static VFX", + image: "/img/staticvfx.png", + infoLink: "http://www.staticvfx.com/", + }, + { + title: "Method n Madness", + image: "/img/methodmadness.png", + infoLink: "https://www.methodnmadness.com/", +} ]; function Service({imageUrl, title, description}) { diff --git a/website/static/img/Logo_On_White-HR.png b/website/static/img/Logo_On_White-HR.png new file mode 100644 index 0000000000000000000000000000000000000000..c86030e1e78092c7ecd1de2cd129198c99680ee4 GIT binary patch literal 77588 zcmeFZWmlVFw>4UVOQD4pw-$FRR*E|W4J{s^Sdk#1xEE=0cXxMpx8TLS#WfTtZYMn5 z`wzTljD5~{$NoURkleX!tu?PX=Sq;Ovg~s#Qmkjso;{bBlTv^73={b58NdVs?b$P= zhPELE#6M8X#g)XLJ*$eqzB597_Dt-Vyp*_x3)0~VW-7U?>z~K+q`2C8)`fY1v2l&V zk%C0O&-tREa=ng`ipyUjh6$h3DU(^{)nCqp zMe8Xo_nt-WDv{#UY|8_Xw6Dhb`hZa z+-jJOqPAmzkZ*_wB@ml@iR*mgGQT~}aer`UI>($e?D&FhFjy6(c6L~zNFZ0i;mwR< z9n&j2JoYr?(4xOOkb8ELP{iC-Yy819+2tB-->@$;yp0a&$iU@y0E2%UsO9|o ziVUET29Jwro4oMt8gKMWW(&<4bVJEm_H`YEgS_KJgFZOp2MnemgraZ$lgS3bT$LucLsb+5eIc5IS57?*y97_NNe zI6(Pn)=GHg===G6MbEmxH=B;ecE*eZhG2uBYGDqDedcMlR8l%1hf-_1fozx~v6Z5( zJl(g7Gt_?1>HgmF>%-#PoxU!`5omGuR{iECKQ0HF z*8z@tI`YRCZJ{;?ThjY+{U-Y0B^(`P`r8J#B1z?sS8HI{f=++F-)nSc-OK178_~{> z-se9}@bOr|r~9!{ApF%;*X84iRFM-s=d?TJldOAKu4xk+It#9b#X=5X`_1tK2ac9B zGhYG-|DoUN?WyrT6&{P#Ft(>wYJJ6yn5jFS_N%okc@|BrSIVqexfW7IqJ-@^8?Vt&!s`xO2X62V)y}rgS_lmuu7-W#(rmtyasyi~<3?0Q z=v{`<(Q`7##yM)LPv1um?ngx3NIa9r;*jGkCQ)V?II2SpYV?Nn9lqV4L38{Cz`wkz zNu4<1lQ>3(6f=*VV`@im4cWa`Y`TR1Src#a?6Fout4Z3%6YCd@oRu6M9 zEb!$_WxC-#mv5mH%=mfW9d)?}ETCIcQkl)}yw0qKxDns)wOuCn5BZN6F8C8;AmO-> zrpUPnA2t6WU*^o})WOSf@!~(Hd~3iM8nrJarufqZIO>wlqQVboLZixocok=I?6zK`h5= zpGS8bBB)G-VwUVONi&<=>raxAN+Qalop)3y+}Ah6^He|V=D1u*$DMvX1A3ak2?@Hs zf=kmn==XDR&j38{8V{{*@$t?(M1S?T41YK6q_?PBL1s)rmZSZO=>DyS zaqf0Wsfl_WhFK8|A$ZCm8MlQn=SjvT(pRZ$_Bg>+HSF*RpVah;y&xa&l{Dq;=!>C& z+L-Sjx8hg42ilGLw&!Hq{96C4hnyxDxM>e7+Cg>(Ip~PS&vb3l{J^R3=EAe1gn}Re zw5`Po{gi|jdi$+hU{`OHPf*&B;zQ16y+C~!89)1y`_fT+#ZGM_CC3RG8;?S}O$zjE zA_L}mJ*Ko3yg$9@Dpzr|NxfxyW^B6r{u_LL zfH~yEmq?cDHjP*k2%H*GbKNa&1iynuTDoI!#J;uPB3IzP)Y#wBh;EJXc1q;R&4LSD 
zJjd0byi6?=V%WcMlt=V8W6q+)>sOdk-cB1 z3ZA#^VP%gfc0d#bMJgH65a@$J?vX1JhL%jb;mKtazoEOD4W+38B4;hs7&$A?pWgzV z+Z)MZZo{@2m)utRA(CDOgo__J+d_5LYcKILG=~|7hrCYm7#BChc)vl3g}$y13A2T| z$;wsi(8Dc=jCvUigbWB|+Js|%SQsUr#oVtasZ}D*qT0gBHSo}<$pcPeBg|8CUJ*vD zkeiYOTT;6&XmtE&o>8Jkwb9yj(&ndSP^8?D;oy3P1?z zW}jPL3kY|Ki^#U&*R|fkt?z-Bs!^j#AVuc#EV8RVnb-JY%&TLqaU885oo{{Xtme`M zo&QW!m}pdeQK~FRBT}4jUNTfOaj=>b!GwqARyTRyz!q1MR*+*W!aNA!_lO1tfabJxaw7t3^~6U4P0z=;}o_^O$Kvm6ga3%H^yT9G?s-8?Qhj6 zGvDLf3@)cbv+OK(TvNU29`C)DyzPE>6*!%yx1)?}m#${}Eb>VkmtvQCX_72Pz$ zP1uHV+hv1SLsgbX>b)?lH?De*KX;?Z7{zRS(DG;s|4z;9dEDod2)l={;-VKBNswqCpRe#_fbjUF*C`8&Ym`dH zLZZ(Cai0&{-d-FzJL6XS!P0fv*UTr}Qs#z^v6O4*L~zyX|4K=$CtAGoN{>{P#R83W zf<=A?3iB8$!&ix>*vPaQs^*9mXw({Eby3n#Gq*Z8pYy<8GG1uqw-~`58D*_|FkUXy2g@_FN4$7^jE9E@bX-$xRpli+7t<@UjEL?u z@VCnw_q+QlqFzww@1F7s(%w71!$GG42dChz; zbin;gc?s+DOBb3li>tz0^mI3_<-+K6foax1t8k4wUoIL+TlpE9&8I^zgc)sP{601~Dyv zM9NyipjiSNUycrRDO@j$HA-f5oQ2gsP%@cgmTG2{>bG2sH2Hn^(C@0&6wW`rh)yZG z$gwV2Vi<91n_i&^xfLj6Kriac7EX`&$t7HT(#H5@=f>tw`g0SBM@MdhiwCs^{wjUq zCigwrm?Jlf@%!%XagR(QU5+EPp((}t6?-&`;j-_7MK+@%5gP&qv|k1;7T5|H~ zX-ztEn!uHimYB#lX{QP)Ty?+O_4&n0@P`4?#vR|U54M|`S2umb8!qr|)&_NN84bvH zi{_hc8bZ*BYO7#-kw-!d8(Brq^P3e7dLYS$lXV;}&=#uEy*mXb4ivt?->7`Qu0<7~yW3$Hj?5m+CzM9;b!UH?Kzc-c zY5rr$eZ(}^W0+n1DenIEg}rJ;CYOJc=h#qN=25LE#V0ZuWpNCa#@v|$3t5e^flcz_ z1O~PS@e{WpUF92(sMNo%*O_iw-~TUw8ULS5Assb#C;H_pR+U$^L1vbSGIh!w6{F;s zTa^La?DgVb1UDP?1a6^UuC*;q-aO>YOxUuVYt}9F3*DBPHn}1pSJ1FkJk4M=+ArlNw1d>&`0a!+iJ_RoCHQw=m8AJ@x(tp;=0qd3oKP`FMpi ze)ssC2j(7Qw-mLq?=^qT4`oKp@aB^DOMe_C9P+f5SsBE=$8w{3xftwOd9=+*_k*nr zY5cAgtwy)|EI5H;xUeI05h<5rCN$7$8Do&xHp=F=4i&O1-c0we=i_eYSn{;BV8t7n{DUG&|bPH#wLW-Wq=xT3E2$Iqx|za(U!TB(;&k zMB!#qO=9#?KvU?Z$P5?($lTseCRHxqHZjmIKTcH1Ym@4W0Obk-Im*-L~bP^WuIqF=^kn zhZHe7>IP5OR=u)2dHR?JNaoL=0&ca&viD1R#@uu7X6p(jaX!#YawYI@UZU_q*QSNe zQOiP(?lBuLEEnblp#e7*-n#iM?|49aaD!^e&ITWx-1a^993-;&yZiw$OR<5K8d$Hj z)TPM8y^G7iy<*~z67Qo@5?X4_!bj8x>;PgQ+uvU#fFwdrHy@2LFvvDF2_8!v!(S!( zcM;uXH#n8% zyxI$pkEQ|jTI;y9^4n5G1vWWb#$zvnKYupZn@*VSK*Z^yLvNUm5mM8_aan%xJI_wO z*F@(o1d&KEBU?K3c*`RjK2ecE|5SH^t2y6O?9rXFr|K(yRB4kXMqDwTQ=<_;#cItY zG)@dOEvOiJ<@*IC_D1#H^GO9_03t=`bBkemRH?i6P~IRj2nJn8>XDzHX1b10eE3_E z>1YCRcNQ~sQ>Ej_?BFb^(ZI>CzQB5GfZ~QRsOgbM86vBrVZ{d)`Wa~bszoeMUHgsg z4{wrpFGOvBLgTK9P$AgE`+Z;K?v+y{vTJ$X%X*IPS5%(9 z^9G>(IuiS<2y;nO%%vIs9t$VS4`Rh|(sPC9eFMO*>xfnF0No-7O8aXSX42 z6D+BZL8~EVvD5D#s63B`0&HCvYEq$Etu|E@k`urw{I;94@X4$w-LjafnTES6!g0=g z$rue(e~xiN{ld>@Sver4-`K7pOCDqkg~F#wls0QV6kyTsM;Xw1O-c!Cm-SGnZpAr7 z4c)DHj!r+AakCRRawT$S$x&d?>WO)EKZt{vtmi9#(}En;R2l)N{j=PY--ePx4@kqb zNtPd3zv!KY^sdDz(!cVXjoCUfPaaiCK3odV6UyZF)NfH{n+TCUw<0h|SdPVgUmnrGbiWwh~&4JQe}7^OC9x zBrRXeo3uPW9%8-}GeB~_P(@>{IMAh*W&z^;RhQaedEG=m|J(@2^tXv-J9l0Re)tBxEQ}wuUMEwv^Wer(7>{+ZRf4S*w$swS?Xw7Y9`1 zVs4Zp}$DZ6F0?GC}r%>#Xz((fiN$4B#G zuVAER@}N_-D|z^yf@xE_+Yk!Tn z3-&-Ox-{9rBKv2wmnXR0?M|UTl5Cn2W^WsfY*|#DhrR*7`L+3Bce>i8 z_QVCSEeSQO=inlLz0TginS+|CA@#aQGemp~DEQoxX86EF=4^!zE}4Jp-xX^yi8GfF zKs2(@*ImuJxnRrh#BrF^cej!n6WdUP^lem7GiCJb=BU6cA?5UR$tE7wa{KeN>JE7- znaAL)Z=!~eDidg*d-RKzBbKbBX7o1cR9;aS`n)JV=UhtXR6A7`-=HKP>@GjFqR|f3 z`E_Yo6DG^Hif-*{!>mGfGo!)2;z{5P9sxRDqD6#ZHVQc{3pE6pY?k=jwzzUe?w4s> z@nKo)9ViDvi$b>KM~CK9rnMd(DDajZlZD21WnoQP{bnuJ&LKP^4dct&4+6%VD;J4- zSVefgHojxN%3en2nAZU_8DA@EuI(9YQjBZa>TP(8ZTDZ&6~%`^BG2r+1lN|Wd9pu^ z^3f3G+~w*?GTG%p!OP<%b&KhpucftoTaJ_FcMx#y2*3X0+Pu^tu?1+U9+Kivru0`g z*Z0z5q&SV9t?vYB~Oy@M$)a=9aWP@c_ZQexgled}S zCRjivrIiZD-9OLOq&N&#JT;WTp3<)%lQOnQF}4qFy1P2bAdWI#(u`f5=;rU@Yd_vo z+jqQiTqi!S7#X98Ul61*prTv(tRY$dKd6TOkIAn8`$hjV^7jA1IxnHbLXckKA?W>I 
zSim@i2q2)}Lb})w}JBlg{rIR^FnQrywNXnX#?*k0mo+KE#N3zbNqe=vQ^3|S`P?H7rpB*fPG(MKq!=Z4js3o#EvjZ8g8ay!ugC%oKHKF&<; zYeZ#lIJ$ns>KjITW%@A{prg{a;IX*mbop4jL#*kAlfNEappU}+c7mi=V3DNZt!bHo zhYE1Jptn?x^M)GOE{_mT%%w8KopjzUXfCLcr8D^z{g#Opd*=HRCH#4|%sgMVQY@LV zX|~HD%fOGtyd)$*b6LkbL*5b`j6sn{LgX1$>4ax~MeI0~Z+*nFInq6BXoTqtO8N*ud8zcu;gY`vESu+Z&3Z zaPPk(evl$6BJ0~c-6=dpB_K!Fd?}+JQ%(AGpcp;sGO|NSnRk5P=8E2?`2*Qhq~g3F z@OA!dzzW*=cZN+Qjx=&^VW z$W&cuk?bEHdYyp>V&j4O1>VlEY=wf3m+A-UAcYhtoO}B2>)ya>{%9QNA zuAYQUZY$T6u>?e^Q`?uMMh5J>3Mnu*&5Vlsa1XCnYXmCCrXTUM!)}S@5kh6looyfC z3IHd+nb#%s89)+sHbEfP(d2h!M>8gjiGrzofaRBO1YUy89PNpBF%Rd&_I$w zj0pLP!WWyP^^bo~*#&UN_0t2a?EyZbP!2vVeHEucT0UoNRhbnZyqTKygYZy5UvH3I@eE-F;UGIW?aOgvp4@k{p@It`06 zaE4!s8TyKSTAN;PC%gZqT={FLS$)r!&|e(G2y>+F%5e>&9`SfFV*-?F`c5%sE8$hq zHJY-Ey8F4nntVORu|N<{-39*O{y|$pz?`Wu7+W0|38k7zmHkyaGe1-Kp@Fhs#nOtk z*My4Ssaea^q+3}ABrwiY#xhaW|Ddkj+rMlw7NZfF8H~K@jB1wOs;h?YYmN0~r^tw* zoc!GAvh(O+$C-Mo|C1HptYdvbf$#W%`fz`L&zNA)`GBCY8WUkgic}5lfF9GU2a%x@ zSrQ??BE~ukY+rv&|A+0O>F>+pstEqj67)`eO`$3+} z{yDSEIca*)qhLJ)hsa7SQ{W#&UvyqKkL%*`@NA6+sbiQZi=nAgVK7)YO3xz~!5dfV z$69`Qs8Eq_;bY*zSK|i;P)(>`->Pii{CuYh|(Zrq2ugLmRbb)3< zSL{Ic9L#m2E?9JXh`2dvApj$8;&RLik$IPxWz}H!&SFxF*>C*EaUjWX%FXi!UGtDk z05{LIkK@5?8ewgEt_N5V%AdJqk$Yp z=W-wV}iy_(B z`|v_W&wg&mkQq*y>4~Dwu4=_MF6SDY%P(bUR(}!jM%trnw(O$710rUW1Ayin^vJLP z#Swv_J`t4b{RI(bYAQ+L-9(f|)N>l`l9{-NWvr8P*9g9n^q+{8R^a!PeEG%h4k>d2& zv(o5kI+{2WPg!JKQPdf!MSZkCGLQ7EJK_Ku^OG} zv>IuzcnElsyK;UMprL_I8mE{6pD+hiel=*le$my-n8}X@Sw0x~t~YYl@vVN#CS!~L zMC~5l#3F7reRK<9;L(`7)J@pl!pZ-++_D*nAhNGsBr+3*t^IP-I)mEGv>TafQHA^9 zxD5d3Sa+1u+Ojm7JNDcl6%k#rN5WDb2%CVI6rimBL3tt{DtONCwIAtFqI>~rH&}Sm z=)4ZJRt-82SU*3K#+M7uTaL5qC&TFwU$)X=@?ESKu;2EQU(O*O^U&tqLx+IG&u}n- zY>UO0nzi%JZ=3OT6<`t|>Vob(ArcB92J3K%H@MP=A?W3gv(e+H;NeC3$VB)!kKjZ~ zdfF%x)yI^lT!%&Ck_@g8d`R1+Diwf)a9+D&+(h7hq(Ed}Q%1?`czhUe@`XWoNt7Q4TfVOq!V>U`eSrX6AkI z=uJw`pniGzV;tvxU6wK$i0?0ei#n3jx5+t={x$vy>r>$W(G&RgZP7mT?mg!>)iOx; zbf=Tiw;KyGejB}dJMH)BT!{*@qS2^;PZ)y`3-;Q#FTVYuHQdnTfKHC{yJ30BWerK} znMwv%y8RF%MlNn^Sn+a^s|nH@HDr!ji4V-Iiz=Zk5dTL0Law^gbL_*0RLse+v}l;# zRbAN<6)%bArvr6_*r>E)vq3HLB^)RJ8&>tb9Dw_|0kMT7pTsjH8UPeLSsN5w-n?Io zOuwInD}-8HGgA_q1LvklJM?ZyD(4xy30>Rd95eYMBo0XKidAZ(>sWOSnT3owR~LaK z!lgwfK6FYE@QrgfRt21VjKgf8ol#4Jlya|(S3jnGD$?NFrE!qY-X&w<0eMO$2zkfjbiE4Zt*HP%L)h>PGOE~ayRf;X_kfT#>J^bl$EW)ou#pfkkxKwaAERIAwA5Y^ zBf>s#9xFX5MXHdeYAw5Hh(-!hhR2b}O@V@O@;`8?73zw?aXSibT_XoTAkZatwPz$` zIH0=&bwMO-Dfyvx0lH|Qlmo(dNM?;okMC^^S^)d*Ti?@M$PhtHaPqUy^XsGmWs=`* zwQ3og3l^{ac1eeA6zjcK8cc`dQ^|DmR9(yn-3k!pgbv#k7gois*6J|fHhWFPd5<*W zjpAh8We5xR8vObS{?RT6jS<07an9tc8+RBNvy<%)G3_|!Z$wsT;UW%}oM)WGmeGG) zab=P=V|N+cnQrQlH}iZXfW=7$I+~v*(nCHldtxA+RYo0y&X%4TbD~!Hx00m_-q+rB z+AC&HPh5DyYhTHsr^nDHqPYs-oRESi6#t@h)H8>;stPV?{OwVmCK(tx(=(+6$S$k2QdbeZ&2y!$VovC4chD^;a;C#*2Qg0WmF&NxDDXAsjGsNT=n@Yoq7wa;5D8DXp%X zEf+0PqVML-g(<$ZPL>;VJcRZ*Q#DqZ_n{G%e{emQp98xec_Dw))|a05T$cB`%Z)3^ zFDN+P|3GzKG3hdJddcsqU#I3k>L>&v6bFCL>0h-K0tw3BJ1c@&ZtIMfpaiZv8gROn zlyk+LK_#@)jCkKWDw)anC7I>hFFGCD4rkTZ9fcPa6H8GQO%f~LmyzXmI_kDgjla`r zFcBIoFi~xb!NO>;k&ALx_bURMQnbAPl;1l$-Pl-W-?@ViV*M`GzScWDx~+_Xnh@R` zGAuIqL6Nlr_K>S@Kl@LsZ-c*5wu9)`v%*&aGiE#>XPDfxbpYLlD5^v<1 zwOyhI@=2 z*5>2B>48;rZ>HPzm%pxaJ=>m6?Xk=r?WE8 zx8P!6TSZ)6=(Y+qgm@Aa7Qq(wt=cX*apY9y_C!+n$?glma-`LHFXJAClQ6G4>iv0G zaj#$YI|Rm^RD)OV9(oj;8et~CT?i5CdZ$%eZN=i(92a^WWdQ;=lQ@)9T-7X41Z zg^JF;;xX=ZNNUX?Qk~h6Xu=^5AqPUzo{@4n-8MH-HG3OhdB8}7Rg~e2y)>bCZy{oX zfREMas{J)$b=qrhMPDLV%H^LPFa9hyg<$2c^tK_W?S+*&HieV|LQF>U!{0L9lVgoh zZiOa=KDWd3SpoQ3K3y6Bq0x&-1bbYwx%lqZI%3Mo!-DFai{tJ`_A)*j@agy=Z;2Zc 
zDcEe^d{=&NZr}Qx8RQ=qw5|(8z|?!xWyR1zbokx_U360*qFCOJ-V|#v%Xtwl~oIIg>^1zd+Ezwt2N{IR@-?S9xh>!l>v0{io$ z$k?$A7Iko{t4PHSwmUZ$>ksTGY z`^as6(b#6LU;^%wOp<5LYL&;Yk^iUp}8M zb0)VRT`iFHj>4P-sNI^A$Mo(}n#M17ZYh20E(Ku}vRT@w6%WYRnlI2nNdGo+@@+w%F?Nr7On8^M6cnLQPRx zu`9eleJghEZ(`OMlsidkFQn^tjY_zmBZA&W*MtO^B8Yay^RnmJb~rx+|6Y14dW*i1 zqo8aMrbf49Ip)~mddTVn>f^r27#CKsNjhuGdak3_85;o1u#F;#Y0yn&&eEpuG=x>- zzDdmioSNOzxz>2f7~ zJhhEW?v?)O<~n>f0xK^HDULZ0S7`A%y7tkPm+vBD#`1i^XT210Z~nId_-viM8X0gF zl~-RgK31`okMix6Xl+_d`}-(WFkE+D_F2}PuoAfD72&4b{MgV>zF-w{U?K|fW+qTR zjvC4U9fnTGYAASq+1meT+jOyLb$L4`LTtiTU{SfhLDm7Xb<@VWfP~W4)0&j4oNo}v>sn)SsY{&FH_vX=%ZPcs{QT*@pF9>kt+L(Jxf ztq4R^(AIz$;Uq>WOI&_l4lGTGw)SRL&eF5~LKKmWO3FB~b6d8J9SNDD?v6kj- zwpeDvGXhYXzE<#mZ9XBUMsS}dqAkacfi}rx@583;Go+&teFy7og`yRN%Ec_^XdUch zi{#42lJsSD%e`lpPxZ9Ri=XN+2f2W*1^`8a^a;CLA{r@-n*}{<78Z-1Bk=%$z$E)z z!vESFPSy#}Pq9D+lJd0uc#JQ?dtNS0=B|otWV%BXNC}Kpos~YVV`ZW%_78rZS%0f^ zY96?I_COSXC`|I{TifWlC6Ok~iJ>T71xXB-SHEgCB2-`fm!9qW2i0Wa?zuCx_g>|J z;;{&Pr8Lzo;oB<0?QGZ1ffay^t$Bij7>+(cmKifn4dIHHMU(1wdAkuQ$axoV+TOhA zx$==Y?o7fq6b)g_?Cg!jV-0@$w9C{!KCZq+K|et|3}WHRGQyJ{*we&vVnQ2f4^crq z!S33 zc9nVqz&&LD{q_hlT=kIff#kSoV^7f=;>1w2Ot-;MrpF~L?0I#iL;2>HQl>hihb}uo z!%tm{7KI~7-KByQti13Cnr{>e6n>(&HGNhbhGy_SRBBKr9|d7pjHNyQuU@K7)u(=+ z^8HlL4P}MrD?Wf(EOLao_DuuFQ=4WTpB$h@*|1vGB?qVgsb=S-LUN=x_q3K!Ln(b7 zX(iwl@42`L9|9Tzfu{K-@~{3A?Y>tiu&vVtc&9V`SC;3AwD_i-0`_=^{Pu0zt z_#9;JhO*hrN46<~b}TO`;}NMM$q_Bt;M1a_LKt$q3+^{Y$;L;VZ)S@{@qa7f2RIe~ zyLv|TYg=c|obw)-SL%4Hbb23>M7Ze9Z4;&Q%ZYVCvnJ%k68(SpnEN{E zcErhRj_bX8$B5W2abWR(B_v-2*5eW0!nN7*+ht2Mk<8><(twhkqMOs`R{v$OyUt6< zJC+z;WEwoLe(0+}-FjK6-|6|dZAMgosouP9{Fzz*UY_q?g!i%|R55QhWS#OalT_jqLKJ?fxa>O_He}@<^xos7a71xB-Kte)X4ha}lUUeJ@ z+@RdP<)MmN&ik2`xiunwRy+O0*T*UKZ29sUblL=}pF42YdV&8Y7w8jI6zj;i_wR_k z$m^V*)+ln^wPSpk8a21FD0j^i?BL+ou@y7XG^@2T^#5C`?mltznzTf z9YC*1`eNM5A{jc;4o1-NPVakS3t z45x*Lt~nQxh2v@|X~6$-3hT?r137)B81;yH+SI5XRp!fv8MAu`ji#YXk7b=(iN^IN z+>#%%_|8ev;liC;`+Nev6(vkLtTJm^XK8XYshW+NAKZstxA;D$A_`L}0R2%+f*EKaVim%*R^9SLeXJu$RA$mN*(KmzSf6pE+2V|^37;7g&gddSi? 
z?do^O&rU}SA$i)@WN?-J*c+*`1s96~{fD=lz|J<+YQ30k^B!29Ya4LUfWx`4)jQdN zmLFwvWOb#wf-2fm*j`k=;PQ~b@H4DkgK))#O?wx3%xgi4MfQinecZD)4^5VYK+fG-B9-3iU`Y3#Lv5CN1%76bDA+MxerJTVE>Xg;Pm%`s^>ufflE zOfyztG+$l47_=fixeVe|_kB|-&n!)EZl zWTy;L)Ai8n)(6O8(q?aOuLkrcSnl7c{qd1n-f!&0!=^heH_3uel!QeGCv2E@M)!S; z1~j6w4J=8F&^1nXduQ(!w7~p1uMoQx*R4!P_iych*tDkV%umjL_$KUYn^&8KAMGrIvvTv?ok3&;A0OZWr1v^#ilLU!Bc zL_M4vBUd`JEDev|bR?4S?A>M;Z~o7Tkj;P9LhJ`Xp4bp}JXa4JA5SNHE16wVKK7|1 z8f9Cw)cFQf-oncuYOfc++lwLM{-p5Csw70VkllZI&&v8RgT`xRsNLUG}8 zYBJ8qysKAZzI*y#H!k7~&@%*6{S2?aI7$Y%P*9r3pj9^9=nos-WDpXqee=g_ z!1u|PKSpr(?KxL7*C6^|GDuK>z>6R1ooT7jkBwFavAdX7Awfm{j>NzJmD3jiodHB( z7m20Gity|*wZ*}$X#%U7urt|(cnHFS)7ZJXWFt!68vlzI!57VsU%JiE&uP2Y1C_Py zjhupxL!nuJY}?jNNB|@+TuOz+>xwEZJ|H(mXR9F5=X-opBmb8bSuvKMeyw6DdFM2h z8KL&ITUdGY28b=9F5@NeZ5C?4g`xMg8MqQYWu);4LI_OYov@(}&4ESI1E%SfN{{lc_K{YXJIaIc*Krp*FEXcHl zgQ$s#!KtZe{{o~^SB%~4&W4O8R<}#k>Pd31p{oqnvC)mcyr38vS&s08+3j&T`zo# z5On***M(zzDx~8(^dqyD?R%}52Cd5+FYTkoZeH6lc0P-YR}qjGEB4wgIxNUNfA-oKR{P+Q{%7-!j-oVxPbU}cGhCEH-$*_-T z!@Nm<-qh^8vHaAuky$lJAxy>OSnD@yY?8;g2*t#(SoR-6VFD})p1Ll5{urLT^weTm-zWV6!$cZk_!9aeSq>j} zRay-LjlCUOik$Pn(OHu%uQ?O6T7MI%fi;*5Uy=1jD=AIsdvAK6bS_lL+h+dRnWcXO7OUFu9#Ge@07V| zY-}4QwMR#z*%^pHz>z#?R39OH_*{6uDE(jreI4hCq<#3Tkv}N7yW0K3{|nb6p;#ik ztj|(vm;eZG2%-%J?O|y7hZ4s^>8$mt)sTcF_H=dC^T1*(y)cBaiJO<#v7_^~8WKPf zC06}rx2_;H;yIILm*xH3EHGL(GivrlVxof8cr?+!Ef7K3bi?g9$Z?^S-YO6h4at2& zD*ntD^{EMi^k}yV72DVi|3X^{ac4|GbcqYnC6mv~X0 z91aM}!dC#y9!{ojwWMY+?dlVm#$V!N)TZ zS7Dh6y-fOx`bBqyMylk?Y_u`|l#kl5G5!l=UZHOG{3<(<6-Pnjx$izoqGT~k^AQ6^ z6BZ5!vt?|8x|>{SJNA!pH4^C`*$E%t!3DmErU2jv_O2r4Gvj*h7yeH01ImFXH=XcY z&4e3b4yDI$0sn5e)E-FJW~?CEyVYbFVBVQ6&!u+fmMls5$nBvUf_sQ z{noHA%`5z~RrzFOTwC}D&>Lz|UN5f*ghYnA>BcOYV8!!cuzdcn5N*2i;%kCQ>PS7`r{4z{ z&ffj_|FHL#Z&7_;+^`OVQo_(6E!`;HC<4+cozmT%qaY$F-2+N@r-XEOH`0wDISf1p z{NDF-J^#V;;`fFZb8+pn&)RE$)>@wx2S)CK+#;qUsU7%X~%qh;BVT>u>!84F|p#e@MwRHH!iZyfuvY>B*u;MZr zytXe|l(jyymByHv=BF{8-v#+QMZpjhhNwWSh{vh}1DkWBiV1sxO|LjnWAe^GQp4$e zO`Ydr)o%JjgaD{q~a$75;zgIyKeF+BCB1h;q^8UiWrXriSEuELh@0OD40hy;<8v7@b~peQ$C3zIG_oP=7Y*)I;4RI>qH%ed4&vp zO-h(Vf5jKl9b-NA27oUAXH4x0i;Z{emn@wC-aNZ!G))veuK!7li%hHM5W;w1rv8k;Fx2MibzvPnR*pCJ^ZIM#j?W}@1_l)GfD zDx?)I!Ot&Ps`LE}v0$Pf$;syC3=g{>jI#&6GFCQ?#gdPtu$?L;RX+K;6XR4{7 zc<=f{UmH_qVV!yAlW#3YfYO7t2FHmdl<`NR?I-CcsOV%hTP-Wh8Aa6A~T z%uQXUVX%1D=Y{6K#E#GN70< zUS%l0v!B(=?}?~hc^)rHC(~nn5_MOptY1Cr6ESq?$V-aU?{i;@f}EbN%x?~-t3-IK zKDnjhU~lcf69j=!-qKOJ!z&Jd2fy-IrXUWGc&6vKV>-3S2=<^#N z|Ej}WSN$Hrm||gy!B>P)d#+@8c%j38T4;4@)8q;%cGsBm&`q#Vt zbICkj1xkgj; z4SQypTxG+OVs|q{G_vLXbk#ONdl-;;V1rtGxt%e3yRwLA)MZ$*@y{BS(qxNWTDz{o zBx|nL*gH^%Gap{xH=#F8RSx()uJ7NKs`|lvE+rGFjB0WfjjT*O9zhJ9vsv4bCbmxO zP1Ghp=XhDI8Mp-)#Ddno@|&GZ9!SQ?6_#n|2PS)S!TS>2he#%XzDjRFTMtZB)esm_5t(4iFN;mln0|Cu^@PzKFLWOE0{!k zW|T}_;^p0SEz^sSuQOT#1gE0*bhO>|=5DboyJ8>ZUY~!fI-pEX!~**YX@!Z^2Ix*_ zty<)cG!0aAYwJ7bC)Sm!F);%jOrPSf*ieGk=0TWyu$lAQZRGv^4@R>9ZBf+Nm6k9g z#Yioxc-x$cdVQ>1qgOGgdFwxERec~EU)lN`8lFAY>#cLpGzET3IfdHuv>D;VTNP7; z&SLs__U~ic^LljjhIs>joR)yNW*t&mL#5L{B?Kl@&_ zAtBO)%x7Fs8@h(sc3t$<)DcG%r2{1KBLrKs#ScLod?eyv`~ftSPdNQN+`n2_zrL&I zt|xbFxm;L*SA_pP+}24dzD52`QAeGc7@g;NZ;-17t6oeP?THSw*vYnXi;j9OTe(E1 z_OOcjDb^CDIV^%*k7Oy@^lD))?d)34gH@@L;_-Y}q2GsG8w?Qt8R4sVPXi2A^Y79r z`jM8v&b)K6buR3F zXM4+M?b@tGMzV(-jmER;IKdkJRtUd}@iX&LzO>iEDEwftruQ5{AG6A8^5QSz74~%l z7vvsY76&(jZ3a#6i=Nb-MPqu!nC*(2 z*1wXVO^sjH-=BzrYV-=fIjY|+RXT$10N*DzfxTvdQ|Z!NsWoRf(lRx1*X2FX3zNGr zO6@u=*MIF88{%l7j|&)D)G{JP8R~*dT%|`Hqly($HM&hQenNGzb)W@1(SA-rF*VUHRE1aaEK$* z+evoi>a``uvpMa9_`JD(C&81>dvfA~#uUF()4*fHkcs5OgtLl3$oPx>X`^Kak>;5Hul{(ec=yGwK 
zZL?;gaF)t$e2#dou9li`)1RzjLHKetg%Dyor;l9&l;bxfN@2i(>=T5_zTY5vr zD(4m8qQ9;Dr5pLrtoU^<+W}lK3<(H_u@X_R@8pP~+w87YDayGD$Q-`WNnm#kP1};b zO*xx3X6sFp+6P8_kUy?RIX7Ok{+UHj{Yr~6BXyXAv098KPSJ1F;%1Iav$w@Q=hf@z z=S&V4&4T{Mct5&K(`yb>;oJh3(#Ruu;Tr=t&!&$zcs;Kr}ZJ)l~=*#TnazRQF?~CVIIT5 z93@6LvREC{7DK&==5UJEHQbleC%+ocm+eYwTDdJf(?nSuScwY_H+l5`6GfpbGv{DLAFXy&1c3gl(60m3Z62Uk!eemITd zC!N(7@q1P`9gEGeXMQ$EP$_nr%NSFzkzPsd;^$P{P1c?P()*4sUu2t<*x%I)wS_{( zc|6ZyF|gNuy{fBU&@305O$s!9l->@GVt_Ky>Wndy9^Jmn3sX;-^;*lhRAK4<}Fl3q?sN^~BWnn9!&DE1s| z0=kSlg;9+~$w@i^eg-v}d@>4+R>qW#F2~*G_X#rTA?nF;X7kc{sWA=*YSd%(C1Cm}uj|Pg;ABEU&>G328>2Dbs3;>aue?TwGzyq^~d z&b)D>n)4{qqO6ws=LurptN>@h0oQZCD?k3&+Wl$$Ta5e3up2x?&|0#V^FB)?Fjvpe zKoDG&m6~Ebc)j;I_CvMe;|tdf=a`Yu`SbD75!@W;1iD(vS>)WMwyf~aVQ^C7396(c z(EMQ_X7|in^x_#^g6dJ2XtIqYWy_-fcHD9s>>!BK){r~rG{cr!M++JTWWW8Vd|oWO8ui(-1=36Vuus%BQ=RY z3rz;-&^vi;or0u_yar3EwL$CJVeGE~^D@CDZ>4AGkEbtEsA+0|#-lF+H16?Qj*n=y^*+e_BsxQQrgy=77OeZ<=!t2`Z?|8CF0jbii*{RZJ+p*dB zRsx5tZi|gcfpaKv)DR`Z3Nvg!J0fHogU1PtDdEm0(UGV=FgGK@S|BZ=aV45) zIdv<%|1J5h)+qw6xP5c+I_~%@IcFsT*&~59XeBj7DrSS1b#)i*Mt-WHDSb zk5ZjVt?>aS5iXS5zcZMhaF>&Nwo@cgI1PkYR`*HZPy(rIu05DW_lwD#mjHP|Y4(`k z^&)?fsz<_1L(tjTV0x~;%-hq7g82-!zHwDnHH9L?RDFdoysMP<%G({uhQX%ph-&N5 zKcSM2V!TPHIMVdhYVoV2NG8?P&*$vtt~9WM&ncBGhQ)36PSwZnBqf($Y0yG-|@Nq zs$E>OXo8pkxXvcz1Vuh|7Ay~<=(ByIODao z<&^aFP0-N~#|r#^wCsP$%6}=T|A^rK{q{dW{8t(BKSBIY5dQ;;|DogmOv3+=@PA17 z|1%_v;(Hud_XU;cA;Sy&`68X6k}uS1SQd)0&QQI!je~mK#?pdin9`u+3saHNL6E~ryFJ1>PKAoow)&U7CCE}RhWvK&7Ph*bhPK?N_5{D%!Os8BcXXf(Kz?}l zhh93vwn*U%1D_hE)Ux9G1H<)ah&tNwXBv3xYFi)9;t22iv*q+NATU*9<(@ zay&v>3_@x{7T27`XQwt#qbkdqqx%c7hJb498FJ=7R2&5XVZ?J4wm*+1jjwvOi`B`v zpVsu}h%Kz?d5-Rv4)H8jx?OF(2I^KD>Wb&@ro~6!eKqmD5hMtC75;x!nE)t+E@B?o z5kHFQomqBow;c9=W;*Duj1;thx~?uOswZ+YkPriVdZ<>vK;kyNXFyg*#f<&#OY2;< zwZD<3 zbl|UxE3ClpOz7N2WAbX~qKRAfVn;RiZ!STL|lgs6hxkO$&cNpp?lNfx$=s{fUerx53IH z0;~+Y2L}Xth!M;|d3t-;Di_b9iz7gG*|vei=t7+4@-)8zbG5~HQE@n7w1W!#nF-?O zwk$FMj@T8t5%K)3oFYW4LjC1lIY_dPxmORwYLY6a0>mmAOtpwXk*I=>>VO>EHUvW`&I%sq*V-gtm|!c zqk6JG7>&x%umF-P9sMBmfYwn^xb}f`u$j~#yKan0ucg${#Z3t)jmFz`y3m_`sPXVm zSgQka+we&w7VjtHi*(vFUUKiqV(@Q5l+b7CBz`ser2aXtXbihzkN#~`EW;4A8V&0v zeIB)IG8M41y5M?r=xz~Epp9_A8sw%LafcSgAe@&!oinb)_x4&$HSMZOw}nNCUCw%K z)$~fcCz575>rbc*CNk|N#qd5XO7s#sSUtBXj-;YhVcj7!oW0<%GvR;aHC0&czCH%c z0LN27*gA!2N#ohpMlwk&MtZ;3sjet}owj5Im)N_PuuD*VWvC7SCNsnIJF)N80Qwu< z2AF~?R2288>G;W?-QOCxM#x&{(evg?!u)OfS!!<3E4PWgl|YFR#lt1d%>CTgz8eH6 zdmMg^3-|>k+n^^oU;UnJC_X5Z3=2eG?yjACaLPxMdF?Mw0rv%_c5ZPn;S$-N^^s2y~nEcNzv7`KDn!d(g=aXbzV?zcXAxc`*2n_#A2!hi3Qb?Ai|nMW=fii1>Jz zHt9$DQF;((^JN7Bo0RcTiF^C7$*(MsDWU6KpT^4E9&S95i=xwH4vsp*bG6lBlLcgx zG+3nIFTu*fX{*xa(A-fth2Ic_-d{l! 
z2vLKWCj9pSywaDzoa{P@PZE^2PiFMzYR7oLJVfjWqy?w&wiil`Pbo;L!N@gl!r6w2 zX%6`7`F=moyp_NUY25{`UGb6<_&@Z(2N^+6_uA$NTgtvdk~di(1h7(f^GaKw?^1k7 z4^;JCq4g_6XCQ z;GnJC@CFIx-IiPL=|l9&d2-cQpAF*N)@KJpw2o$OH$`oa4nhRo5C7ioT*{pjyv&)ul+T%1vD-5*wdXVT;TUvpnaB{k6-J=@^ zs681l^l`XM_mddl?<>9ml&jA_;{Cga4Sk1f50#<1BY8RD^}hQq3es}3(xfjMhZ%m4 z)iYe{SXYeC>{5uoQ6}D1LV~t~R=uxTfo~M?BaWR7P%*SyTPHF)&!iq!l2oG9ZRU)l znbo4!yk>|^CD-oo!()2CRsa@|-nuJh5_=ooK1fm{85^O7QlxEoJB3)*afLia`O5&{ zniSvQy(s~%_XcX<>wN3B4Z-jYvV*+uXf984-S%f*s!!Z(q?NW}sr#3yxi4ez+Sf4{ zyc|Wv(5EcoHFdB$-uJ=)-phyNKT}bU8p!>n1oM*$)<7{5^aoDeNguP~7bdFSx6}@* zPKfMLnsL}$9-x1J+&lkP^D9n>n5O;i-(z<;*|#j}aBQ68ewRRn8Wq(0I`9&c`Aj)|7;OUCL_ApM6nY1BtXNsVmY|&=#x7>^V){# zp8LW{D(QP6s6+nl^w)&yuuv92Mm^?=hY!n8zJWSy6`PxDo$RBi#;9O|ER+2la%O$Z zF*Ac$Lc}h8kH1I9I@P{ydNrL}Gm4MG37QT}(e`n2 zK~@ua2dwpJVv6tq78uIK+^JTsZ2T)bXqgV)LAho{$F1xHoMO7c5d=K~v&d5R%=4$L zF)`)|#dK$oI=vM~5_)==%qOuW;NVUO2w)GFE$mCH_pt9m5ZQQ#UH z;5weAcME6G2yV@%vBzcMnL_GP#Bvv}rVl0$_Ks@WpMZP;9G;Yt%Y4wI);zQcdMg-u zwYT2O7t4>fL~e}Ms8tGHX;78)byk?ZUkGpnv%j1|K++u zvi!Nn9IiBZdU)iJR(B=weFx=i3odE*v0YAsB z&flru?CKedR*(i`$i=O{W%~3W2n=~j>$|m)>!GH#zspR$m!)!p@_LhzCFeEg`eeUd z?!5O_J;Ei|mS)@9_F`B(cUEo7{p82jTaFeRFH*`k=}SF;jqf(LU;ITu^!u3veto}- zf@UH;h2oxhbG9>8gnTD;e6yuj!Fb_`9{PW+p~&l|e4CAI>FcGWdUdaP|H0gIYbC3f zxjQ>MN1xA<{Z<`dmbAW(d^`=r(nd!8WkWuykvynnGFIKb)>Ww<%ILz!iBdoKNGbuOU4xR3*rV$M)W1 ziNiYCj%OMFgy?7!=SlAp|A6k{z97PfDE9hH5a#{X&=D$Gd_K^55$~yZ-e_8)U<+;l z0mf*V?sdj8ph?%POyvLU6gKo8xJG54SxYXu%xP{^aiF*i=TA+TmQUh7>D3=2%a9pc z&kc^#*}n1Sbe2zGe6|P1D_bs#@huCmyrA8XubvaX#(4(;VWV>uRFk|=P_#k@bnJa= z44vsekI@F|fV%7+oYro2jfJH0R3R_xBF&ClvEIXt=bCV4Riy>X?P*?D7n7o<{GxEx zVB5+}lea}~@0a^|ZySHHSrJOJ2Q2X44AZ6OqmZ78{aCMRw_6Lf!oJriM2XJ>D?}c2 zSu6z1GrbpymT?UZ)*qC}_6t_#*9b=0?1E7EEA)m2io$RnVhZ+nY{{*06ExXNH)mq$)4% z#%s)G6Z&3N_6v?FgVET^Ms8&;lN8bQgZ@%OI5_ZsNk@VH0mN6aI^a{EU!(WFd8$YE zDGpX8XE(+u)pZ>C=U8`5yR*v+RV+=4kQZi&)|n5dk1aYnRB%0Mf?Uj_FgvWi4YrM_ERfHv{AcDVdG%fh#=H_C# zu*=Jh%lIEAYe@V1*;igoZk!+Sfr63S211~^AH3>X5?kzwCjFQ%l#tl_-((}5!G zf#FNH$dy<_Tq*vuI@ZOKT8751C;kym2&w6%9g|W~uaqrtA5)ZRuO|SnNv6e$dROQ~ ziG9nOSPy`cfd^U*+A>C0eV1I-Q27Nd#vX>V2-N$(asPF~CC{d7*~2BB`)O9Ov?^^4 z)dg8VYYU0ykm)`w`dgwSM6IL`_~^dJsm??E_T2p{|6rkIUMRux{3;l- z1K(?!x6jE|e_v0LN*^ztqHbTA7>_3tls{J-=H_LCdpyPAdy_G0u>k*)EAGn|ujXfy zhe|r@Ij8^U(VuUF!gNYpP+#5!59d**czaUd(*5Scs`%&rmZJ-?or4>%f?nq*?bNF` zq111t855;mZ#$}>Tu(rwd;%}N`5|lOos>#PWyCw z@2kK?b?E6SxGw$9G}VB|NC-Qh&^;JpB*=_r@$Jo{1}#@YqZLz|$=5REl)7^lGcvS@ z1HnGI?(_=xqb2*LKMP18VH6YTb53EdJ@1%ulR@eTjOS*kOD^lDMk1K*T)qb!@M&JC z5z4;3`{w7-Qjt)=bu38SlArqLflz|dSgt~$E>&L@#<x7tzO*4<0Jw>x_MEw zh=4q^Ki7EjRgX&dyyA)nD7?3Ki`4#(^5Veupy|5jb}H;Wx#xe+TI3(NJ(D-o1g&;4 z!aq;e`i0U0&(hqIt@-yXLy-BTl_*Eqkk=stA`LJh_J0?Az$AyZJa@(o+;D4Z&4y?= zOH{R>a?yrMLQ+8Ow8!MY!1T55VL@BaY%bf5PFzM8GX4yNK>V)p<0jj|V>AF3DQimp z!QwZt-C%TNcdC6NEw?D=^v~0=s>GN$5@r`d-nZ8=&jvAz#@)s;qbORV+X12MC}eeBlAOkEINNzJ&-f!6Ia>&6uBk zb3l}XqvZUss*)r$95+Uk*jX=*>*~FGOz0IxuPqpSNGKjoPoFFD+ru+UTEO=w;?shA zd-ZfOH}^XSUkO@=UI~Nr$a|+1kSkNcI2G~3Vvu05c$Zs&2)T%~PPM|b&&EgPagP8p z12FebPY#Xi|FB!@xgq{U=(&%5wwQ853(cT#ha3RSuTmLckfrf=FGti}r8k(swRIB1 zkA84A@boAbejj#dTIXe1lRvk%J9)u@1JXJXHO}KK&KlNZid<}T_xH~?ep26}oaVDg z2_GkfCW70nL!C)*W7xkk(${wY>;zN9DbO8ufA)}Yu^;N*DqOv!aql#bZ(7x?nIfuf zBl-qaK|d%VexfY=L|hEPJ7&&W?#M-1#_XhO3bJQ0rewS}nrOYZD(Hd4vqy70{GL|% zp=t|X@8iWpIi!D*w1Ya-_ZoC-ucFe!@H!iK%CCDu0T)t>K6LFuJoyIhWSwR;nA);n z{M1p`_`Bm0<@M-Os3tN%CPa9n+7)^BGyi`AvagI$rlJtb-?h4qo%qud$thJRvHFJA zu6w@5j3@Q%9={?>X%aZ*PmG!QMW&6%&r5XI&=oQsfsBe9>D^{pHEuxj ziuLKfxD99&-K5vIJ^Pn^n1~`O=38K4u|(H5{}!flkmtF$*5ph}PhTSY?|@drO)y-| zjN`gXxFYXe%{025HktVi*BhvB?TwUvc_%GIq=&x{1#NX~#h_q%FZ{(iQ@0Nw&5b=~ 
zfq(FpD0aTfhpRw6$gIGsg}q%X_Cp&!@7KN0y)-Z|KA^@oZ;E} z)$`rT$WwmHjpCpcO$$Z*7mCuGOu)%&=>K|%mBd!TV?>h#(Pi#p|R$j zN;ZjSe{1wl0hAG)x1JZ8vjyxeZ)DvEoNcs$Dl-G(+YAF8a5%?tQwxe`-^>5c38B$K zrffn^TwiM{Zg#zSEfX)+3+8!WI2@Us1$fT=8ga78H=PA@7v+w*F$gn?c2tQKJJ7gu zvG<~_aUe-s>qW!$+XHdz9J=hssBf=8E@IVtpC6PS!uMvy_vm(ykn|5%a&TM`j;a3W zBk4e~A9SPVk+r4=YAJra$I zh#&2}mYGX%O={Z+Sk{qrN8--{SO_mlvKr>ISKCM zqD1febObgKkx@t~KM3OyloYHWj3Do zUCUe~A0p(N?Vcj3G771|N|<&vB%#+-`eencxTb-Gj1<hxe3RAlH(w;vvY`2SijnIN@6l=qu5g1p ze%g^{2czJ<&UpFq>bvI2PoOsszAfa0YCYr29a-1ZF;KBS?QNO+3>t?fSDWJ=bcFEN zw`?>bT^4aTZfxJyfPEqaXe$qA;eX-KHh7HX>f1~-8hffJk&eO%6$cYYYs^!FrrHeX zQ(i6hT(4M-jiHE>TF^JPE1101DYdPpF2jNDsGnM{Uh4Xl0Qb}>;@O_Ogfrpb#`<33 z5+I_p%^duH*E~TiUgF|&UbNb6zHUze70_|*a5aGRy_bs&-uu(3{^q3rVgtW1p)O6~ zBwv5dQapnYnp8jaS%2h|88ZE&dP3KX*17}NC&;x$|1-ofaWW5aQnv3`el5FhS?il*}CR9*#@;!`Q|pVI+u2rqB(KfSw`K&%4borRiZ&=q|; zErLGE?G?qyf|t2Hx7q3h*^6L55ktzL}#M63ppe+L2#s^5Fb z|8@p=xWJc_jA|wnJEIgmeh8JF!P<)Q%{!D%S4jb`MO5f!Mg#N-l9+BS>;OR&)kGd{ zl$?d^(rb`N!7qXH=yX>w`A6yA@62%Jo?z)jI&%4jIqovpdWiR&jTobUztcImf@SY! z_XP_NZjZbnLYh83Rpwyo zl^+l#p!E@Q8^-5PNc##JU`hQ9{Zbe<`Wwx#tH>v5{pkRvU^EP`oh6u}!}yl`;DBeu zF`0}=Ilj1%_3SuwAF!^NDn>*wE*%4FJ{#5_{%_6Spk*ze1Yt1BUmZ$43xm0%#HKFv z(3f}GrM4R5Y{gmjy7F+5S~5=Rtl{r%tg@5~E-ToptwR-<WdMSi{R|e5sS*zss(t zjx5jvf93yqffBf^b0%4BC13L*wTRY}HjDuiifd8$#^`d_`Y;Xi)6mg<7xf#tQWM1q7UyTD4isDk?&Qf8%k^+1vt%cMhw|kpIo{3$sYctb!PPbIl z&LVBkV;#B8OEs6^F_rGMmrRqkU<*op-=9O^Dj(W?_|JdOSr)m|-cc_>o(=~qR zHTAMPKPX=x8#Fe`M3g6{>L>pNb3L97s)r2KONy)nrOvpW4RsC{i{Ov z%rJ-0A*r9pP3f#w`|#^0KiGf>W?2*RN#=((;>~NPI5U74*_eND>`;I)6^1~ngXFuM zoC;Wm62j(sMNuC6a?-|6C|);Wz?g;Dd-vOLFrrSmu*q?TA4sM&nEizC0z~8b&tuM0 zIrAbSK;~Z^(@mrw7L-(2>DnvtUg*IuHU!;=m1#aZKv=0xXf%52&c4@5MOx)2i_{s6a)lma4ep{+M`_wO7< zW7La#_n9gxXNS9e`@+N5O+cDT|B}4)2^^(^VWd~UpIlZSAQVDB^loT+26DSDc^4hD z&(&BuouIbw>a>6{(d7n*ZgrxwU_v)So278Ox<8wVr!n)kz&rTR-yacxu(e&W1MfDX zX<^|dOGT%@x1FLP(2wZANTYfJS=7}Ut>sACgw7Kb>s=xHa^LouB|CQ!f&0J?xNb@H zo49Sk+RBSnE8C=4qFC0uzKCOEgEipX@V7j_-J8QF^ig#5SS6Kf7kEoqM2P|-%68u4 zT=2++j~C<~MHe~RIFt@7&>*CzZ2VK_Mm9Q}>Yg2dZ{Y~D^lJY)zdoJudo1h5K|(*N z)upiB&6M@Cvx_EB4ln6?;@ZBWSK1HUs%lc&Zs1)q7C^b3s`^%jR`2MIi&U=uMm0>a zJ_dcl>xs7T+cMx56y)w1^FFX<(ZZd4G{*`MD$1qR7E%C>8D}#12~Nft=>ahB2gNZl zSbfwriG_qj2EHiR_tzn#2Ou_by*r})H zB{zKf7}QT-AvbZ$9csbYax9n&*+Z+T{M64w-T{4F3_*JQscfYC7&&J%r^HIF&>q$O zm-ohTCBNSj_B*^pjh7#k; zQ7B*2H~GQViJo+?AY3AOC=whqRWLb@UEj6tP$ZcfGa962HivkZ?jdoZ zlL1_UqUD#JBK6kfEt3y$l9%;9H7=iA7_RpT^LfE&yI8%;hy%m<`3 zcbj=x5j{!)r&JtsLHeiB~_(O5syYEBB7JPUNLpx&lg)A{K8ej$JNBk1*?)jT5P^ zif-oadNHqcGW~LsmL4R5ZNc^Y?E5;?@CIOcy0F;qETnLH3|77}m(;QSjQ4Q76)tWL z=HNT4ER1~drsdwc$*Pcoz+b7Nvd!cqd~zzd(!%+g#&UUt1>v_up%^eL5uvW1>u)m{ zK(lY)zwcMZM^$T^OQJ8%3;Kk*&cd;1)6C2==-@fDh+^;xkO8e=W*?Q9AtQ9ZC_AFe z-vhL^*VQ8MbLotKu(W_79Gx9V zz=&iWOE4H_0Qd7rUoj_Od^>=6?2w;6C+vKGc07y`raVBq@ZOxNt3gofWR~bO8W%c) z+$Qjr;;XRK)fs?%ub0QXtyxjm=^Z4C8|YfLUnIx_h%!5r=Y{f?0kX8cL~5k1f(8Oy zPhH4;8o{i^J7Mda5Nf5!)WJ--F-x>d*6R#DfQ|UkDXpHzpG{!fM_mR? 
zmJ5=h^_BBvQjtyv_INUZpYjMQ4$_L&CBT)?1fMS?o)DX{rEz&K0lb*I4+yJB-JiX= z$gY1I$i$p{({zp@gc2d|R?qbt56CL`67*EuEpB~j!`=>URja(3rIEiaXa5Ce{J$j# zKy#CPWogH^-UF?HS_9(uBJFZ?{fC?bhi7Cku?yUx%L;*1<@3PyNFQ1;Lt-E`u9)B$ zD=tG>X8HX#7AH0{5C;f&wMB{h8ma@c8S^87Jzsxe#Cr_tCV0mjr%38O>`?Q0zP-W6 zmhZ3Z*>k$-D?(si1P$lQb;lyW$xPN#?fzIE-l_~TS5UAMvWm&2C9t4B@Mm`Plwq!G z9niHF|0NscHv}x}^n@V&5tPPLZ0j{_9Rd?;()QQZ)yjrNGQ+Tk=*aGpGz+n;<%RjD z7S<&w>7Z8%CY5oja{066~aWMAKua)IMyne(umh9gW(ts*{Gsqw*lJXrjauJs|IDfmStt zL^0TQR3xCwtI={gdraZV6xTxRNARi+IEK5ui0*H~D78|er~8$|%~B?o(!pX}KrK=X zMFbsj2!;Y>KhLugPGM}#`c@ZfH}g833tsglmCy=+Z)buNt5e(yy(fCN+&!#oio>`F zTPGWsr7&K;!8LYe5(kT@ztyq(xdVT>Mb8Ur$8+f2<2VbRcQ^iie)o(QsR@^_`QEJx zTca>KVz}oeJ1a}fK;qc%M0sQFBF8P8w0)->-4eVs<_>)dCXkuzCPAFeoG^XYs8pmt zUPJ#lXTmmIhSb(Bt26u{S?xAoTohi9D+6D2ItVlstRQq28{>q=YV#VPYw-VeNT-O6 zg2%WJVmAD_TG!1pPS4duR>xgn>d(*`XAp0N1BjOb5JjDSp@?U6@LP13igi{*B-4j) zU>*g9Tg`7LaeLdSx_(z1@Mx|OFSGOQw76Aun}04P$1Bi9^De< z&y6(29|FY7Z84TOix*N$TbU?-6dR{^fC89wbU4-mk$1$)&FQ?!bZoQK!S2MAR4>9A z08I~@s^d+uw}Ie7J6?N2Ss$9DzW2#8Syyuj?R^anphC=}PArc>!sxmlt3kEr3w*<$ z(GR}qOf&Qgw4~8nvX@Usgg2EZxXrCIJN)S+T#mNKLWJm-PH7`TxSlJ2oYtvSaQl_{ z>&oh2#X?Q^`TH8dkj9Geu6+H1J}#f*&5|%###vDpASf>MPS_tB!Tr`c0*LP!0uQKl zfEuB>*ylC@!wtUG4>c(lH@hV+g`WA|6V3$l=5Pn(F_~-J_CZ79CiB@qWdDR!JWSLU zBsRHrxP)DC`B@c4U?n> z?;nNB-j{0OZ?^#%6?vGA;nU2KY$qaz?t?bX)#Uw2c@!OW? zlU>w@3%^H4K}N@ijjQghi(zd6U2`0K)py%#!tC*_U32SFp>gFlr+6qWxFnukgehVz z;5LCOtp+n6otEDrs&asMJB92k=#=E3fBr0oz5H_+2K6dFcYe7!HJl0r7|hqirT*n( z{(A;2-<+`DG20K)+QB}V&9(*ISL*!bw^?n!$J-(igb|D#*(sru z9J}-F{64?)Nb>GVKt`X}=Z_jXIYEwSa*B!uI0WLNqC@*8+$g{nJ}FHBPDe1fjUtWt zNA|XfKe4Ty?>n2djxNp^8o@%W%xm%wEPAKA%}?>mNp0l26220RD?5zG2d>Q*DLjyA6x+Z)fE%6*2kB1VP*}@?` zP7*IP@;LiGHQP5{zyMN4Hz_yM*yJ@yGZC44{i~_CNu&E(n5t9|d5p$)I&Jucf7M9( z+)qzu))N+IMfL^Hqv7bFa~Z<+B?#M9Bjd%o%ZLWlK{b^3j61_H8yltN>GzgKE6Xzc z3M)XNPpjTP#m&&XF7j=vfnxq-;UwSD7Y`k7HnQEF zBFyGd04F*>9aUeD6P{Bf7AsCr%zFoZsKpSySjKc;f_Lf{Y_nyM`4L`EG!Xuzs;ak@ z+=mEEnNk4-A<#EPxSA+((wJ{o0%g3Gp)Of5d4Q)N;VM8Qo^f;kXol$|0Tzm>Z4W1a zsPPPj5hAQToUb}*WT(2dyEO&QE6X?ON*mDwwN(9GFN$BJ~N<{-E3L7%X<+oZ8*Qao^M(p#kxUo3A8V#aTVXF z)EBBPjsg;hAo!CC1kGvt!YKulTZCjUe)1zsfTy>=oG!2BKCK0yv$hcpcyv&sv=~vQ zrxsCaa)UMPh7ZB+&kRA;BKt|xUP~apul5b4S6W7PJ6zC0o$iLsT(pAXWmZ1BZAGi# zXC;oI1Ki9q=~T+|ke0=4(WHKsk=BVM62toRcW`csQnNOwEzYHgf7LESa6di*=w%cgxUFK z4w;kIH&H~kXLJj<2TZ%ev1P+<(SKen9$jhP{rIZM13h*Y(yB?GQ1yeBEluQ{XymuG z10ukby#$P6>-|0wu!<_*yG-=X9bqO^+`yUMU}Rff$eScGo*iepG=Eg+?(R-+_}EVN zJOs#xoG*UMs`$+{_Y~8Q7h}%Pq4OEF9mMXbzbUYtJKlyC4NO7_(#(Jt zX&*ebgK;Z=(W3G;GZT6L<>;~WlDwf0qT%U+t?_G~z}NHUZvF~23JM~5l#CRdgATdH zqWphP(`n7gPXqky2PzJa670_ei#(dp1J57^i+~#^r9ioNbJH_fw9c*HYMC=>pqESjz~)v9_aS?XzF_-xv&t2MoM zpVb;hfp2mSn#L5{h(A=}yc>Zv=iLF6ugrPxom30K z_bZPafe&2zistmSVf4ci?W%A2b5hse0B&)UGbaURWtu2~h^E(=O81<=;!7& zzGCVU7+~e6mb5p=naB!nXU|O&m5Hb9xTnXHuyjN=kHqa_EyU@}2&g(gv@_ZJIehzu{5my)~pyx^p4&5Mll7 z6EQ&bGr&e+xSB3u(ZmQbB!)NXHGmb_rhAkRb;;XI-CPS;=*D1UGn=?xYcMo%D8aChD|IZf!|W0>SdtsJyktq`d_A^*$!ZrdK1 zbG$emIEo=dYk9It#ew&|pU9rTv0t87PX$Ft8i((5t7d~6#~u^;5Bro8d=6tP?BzrB zHVL;`T`{OK1vKI+HV7JD`dLeLt0Q$y#tFSetp!HAlQIRXS}ksLKWg@*wT%?vSpfOz zG>mwf*16D<@BJ(tV? 
z>Oj-}h+Y%B83-Kf>k?3I@*rNWPc!wnpMA$Qhg_CiM5f;C_$ z;`Pdl!lQe?eVVa9TY zDE{IrJ-Zn>He-2){yhH9X_D(|m`m0yh6`brXmz|uGuWI!8;3i_(x`D+b)~H&1`vAVCHPMvTPwR@ZRurbFexH@zl)ufTnF_RYEP%GfJPx&#JT_@Nu$R zvVJXHzuj(FBsjWigy8SGZmzKq&VTNVGF&=IRj>=d$NUb%nd z+n@mnY{|V~Y`+*>ectCcggIS5qk*Iv+SBnj)btyktv<9h?=GV>z=|RGvm$N_=@U~l zZ}-y74E(b3<0qS99!R)npmAiAaJr3S)J=-O$5RHObW|Z2>Xh>ASe&`!uVxgaN^8R> zcdgsP`ED8F``YE&4p!OhVIj0I%~O;kW`Z|%%Fo_;+b-N4#V0O_Mzmdp_cl^2%l~i~ zir7-yRosl!sPhHlW@@F+jozI*r&m-bC+w4ukFf2YO6n)Nqp=2ELDGQ{d= z1Thq}8|lnX5}#x@WIvD9UP3KZ8{0m7+vxb=ggzmm=?6*&ql1&zobc}fIiS7<$OcNt zlK{ODY!DGw#3|rcaXg1Q_^~(L7blL=(0tPcx8?d?4AYin1VKms zfnrSMMZWLe{F;x6gQHJ?c=u1Hi%+V8AW*MMW?riY{!X~vTq4K36Z%6@?PEVsXI;%g z_Y|8ls#ffB;*9Y0Q2gfqVehTts!Z1hP+5W!Dh&!sr!+{{5(TBZyQRBxDJ31!sR&AU zHz?iRh?KN+!+96X{P%44`JJnCIX4XREf?SOKHcv;iqD?g)^FFbJrbneuZj$og0Fv% zpLQ1e`#RhfBv4l~Y&Wi-IgyiaP)UE|YfLUVSJI4EMO+WOaJS{z_YaU4X|9tN?#C{w zp_<72|LMIEM+22O7jfdHfKMns(j}hxZ@9ZEoUTiB@B-3=8zKW5Lpar0i<+%8qy2qM zss_-^xpn#y2{`#F?n5#ao-j`iUFhmc0O5B0*OZLaKzhJldCefyz ziIL9J7&cOL2;u`2AN5d9nGdgYvbo(%U9x36#6b7aB9driy(^`~WA!&MdX34Ykmpig zn7fK!t>9FDDOB$1l{y%<{zjOwpdS_-0t#7s^RnjD!UA-7l|SC1P3r4MUzPv#BX~&; z>hKBjq?q6>XbOYuGqhEgxSAwS+{EHo3{4#d`WBw&FT2l`H!6mp+7S7XGdq z`@o%?1Cp0xwx~1m;!Wc^xQ>{ql1-j?_`Rs zyXV9}KZ3|5=Dx3(N->CGNPHZO+?l2&N!+z-`Et1rc%7znU$7tz?yu`gHbw=;yW137 z8c*QscRIFoQ1g?4vm}T=98L~lI0D7v89F%Er76P>^rPQvO__2&2H{bQ%@`fX$>LOk zda<#!X}l84<8pYzVkjv(_cvM$Bh+(8$}B!=5KWb7zzSRYzZk1YGM&bvjV{2>fCEwnQ4IMX;v3!r@*w^dHf1Wp~`V&iBIRTb6 zm@J2cNfUd$@p}FE)FEmqJ3DD47~BdySLb%rvy>~Pc!3oMIwql%s(U{?zhfI0f*!=n z#1?`VVT9%d=@V1zJyp_e3N8r>^pP`Pu;HC(+Xu7?bIoAJhIqU}2Z9*@B{|P?)xoS>7&;VKHt);d% z$qt-Y*!^vs{b9R=P(hB`{;P}>0<+-~^so=>VegV9Ns1*-NlA1>frhpk-t#zF zirq$AZ}(*%7$WRc(Vj3}dq5!t%Jb2A`6}xs62qS4nbXy=R;?eTAKAtOhZR%viR$j# z>jH!#>M@CVwD)aOOC3{ae>FJU0mLn_=L(taOL@LPB=QB;FRgm~-Y(&bKRd{^oLCx)e8Q8bC1{#6SS_~B%@a0LrqQ!D3Dif|(xiqTu) z75iKz9e7RM+W1Fb0Y~WJ2@2>%?Nc%UZafyUnUn`zZN$MD9ravBv!7W3UNsxdbg~I8 z?U^!Jf1O6fnFp?Ay62|YBsT{!jG1s3Ht&1Rw1TZUynsqy+F(6>aeMaB9U3qDM$J$O zr+GipyDy58$59`=OFs%_pgsZZyU#xs}L<{MXeql(mm;#w8u_Re9+75{CdKi6eI3S zW@27_H^G`>tegus68Tsgm zGo{Dt8Zqyf{dcB$D`1Y)G%z@Iw<>6zl z`L}3{){>qly_Td;6D%dSdG<>*Mdwrke1f7E^Ny;nE;xMR!^7y5CYQP$_45wZSrqY% z@TMQi3mEoNp5GIoEol#RQG<)PVFSl)*o(&{GH*YkxPZ zzfvblS>sLKIM^}3W_?5)WO}Hiz`;#{Eud1v?O&LKT9u`CwW^5J9|x}4o_{yy_A+U< zJ;O-HbWy76Ey@S&G&lA2En>p4@Pcy~C!;A72WX+Pd4>uCtI>+3F1EY=N~2^~=`9kAGbcx%{97RU(TlSF#BIA} z$z@kmF&c}{Nzh@~ZEc<;pt&ty0~oK)o>CyZB#;@i_8O~3K+xG8DuI^6$)e88ttUs< zU0w^~(jtgkPcgP;D>mWhuGSydGzmSSzxSGdyWd-(#HEP^z0l?>WK-(!s&FgDyrBjlW!^Kl^`J5LQI7cov+fIHgVp;S$FeHE?6{M3 zSA0&~ic`aF)7Z@-FwC>7k)!|S}6CV8nN)+m1yNsgV3O3 zT*@12f&_nmm-m{#k*dOA?1%FHbky}(U0TUj^t?Xo(I2M+zo$2f(u4UJ>KjK&O59qG zjxMSN=jM0Iu5h2O;WT7t&>_caY1Vn>vJT|&4qt5~^E!pToDuK(=Jhg$GAw|$%ijHb zo@I&67HIe}{hTTmS+)h%@+h*@dbC`@)o%_S=udwXBQ<|UxVo?~1Om!D!S)~bGU+=~Y}#6d_+B?Ci4F>0 zI}D_Q(TkCggZHKiY|2BQcZ*l6CMuZXD{tH425g>vQa9&OI*DJMm^fNdtNek89GnZ< zIwJTFP^qChl#rF-^`okcAs69egEMZ?7|Gnv7WHkJspxw(vwPDkf>)Z1Gabrm`@5*- ziAj71ep0*H6zjEI!B~7Geqt?XrB=R=WQku7?i@L7GZVa!vb`BUfcI(xHyB-!1qI{b zBbelfr26FQId+Swq%`tvr72ryc|xJgnijoYZDh8O0yfQ})E+!GC}GR~-HohkvEuUn%%k3jURXf2H7GDfm|k{*{7%rQlyF_*V-4m4g32 zrQnn{{E#i~|0N}}gwK_ubOyVvs?;-RPj-({(ja;9xhO%sU)#si_Y_@H4BkS#l0#O$ z8U><*`w7p)o5$$gpVUx2>Z&iwv}t7tLZ&}UoWYR7YNx1+6O z2DkY&@;?(vAD%b*WU(fFM;Ha`JF2s7F}K+U*HGYH*i6M!M(}aCcGxhZL;!<)(HCh1*)PIv)=`Guzm1$||#80hn4<(}C|mjMcJVZHX$~TVy6( zLorD>mpQJ^?-#};Kao0qVe6pcu^y!Rb_9PkW3|l*R21tTW_7`r1*FsUe;v7`UpHvr zs)jfzr6>(nNOLAK)z}5Wr=or}c<;4STBU+~$i#PM28`o=crxe89QaaXxziJh3vz%u z_IvePvrP-|ZH#_o{a>Z(pRU_W#8hiG@Y>MFNl1g%LcR~!pX31)!I5C1Lu=gYM{7vg 
zq*-P{k04RT;D{K!cNjU2nf;!F$Mvx8KLK{dxgN}3FQ~iRF&T9ci8J3TWhPzOxBSXW zCG3zIhcTiUaH*}bWM=W&)-BS$oEoiTn=`wNmr4QbSPHRL|JKmg+RBKGoyGVp;_2xS z?Q;m20V}kz-OWmNT#=-t$ta*SP+gI1GBP!FJbdACpZ+2I?T_WthB{{ZI+Te3wP>mvCz$lFImU~k4nGMK*)GhR+8sIm*Yo}BAv zLcq>ErKEiTKg&N!uY(JpFyc44VyLKP-ZUscez$O7`4x2Go8V%yGo(XRqSr`uQHlg> z>XXkW=BLgwM2mSPqnuLYvXZQ@6d(0fCjePchj%4W_YreB+x((F7dx=RgxS|eX+F*+ z0;9r2Ofq=Si{k-+7x?X-l(k!4JD?&y)x14tbhQu!ZxbKD{Tgh!`t+|!%m09PPa4d^ zTA3;2Y+FeTFSp!sF27p#;G_ft9_Z3B1X@O}j0w==l!?iX$O?_QEsePgEmS(WC%MWw z+m;~F&Zx$)O9`^+X-Lg4hwVuvVoHm&A_M_~^sow>6(o#bF#b2H;N=UjcR-+irdJDJ>8|VWM>71$`B1YNuZoMS{ufu| z{dZx%VTTDA%Sd4RB-(=lAzVDk6ZeeB^H%s=i-0{TBKYHCCYKvRTAFrGE#JL z%o@e;v25pq(QAMU(7QY3*uYg=@71e9$!`T;om8`R5PZ#MPj!5=q64{8;Q#X7NEsb4(II?3w;^+GCs<-jvxYIrPik{}XXTEHMW`-oC+oQk`eO}}4pxn|_V!Rdz! z@Z!ZU{HMAUQTPF$25Dp9+;p=@FlZRpoTruuCy+%k#D|jHpG)uK8ShQOrIK+Ns-u(~ zoKtriEZg|h@%JtOKfwvm6VU`&WD*9)%2i7fkUd+X=+dfQgQgRZh-(pmjh4#^R*w}S z4qz3LP8>uvhL52(M*H>9;trU5H1>-Rst1cOWE2qSBybP9I}-mu4_KZFUyP#vsk)}d zw2y&vcK-rYHGUpWeZ0@liGra5YxJ{r3!9tr+N;0quauUpbT!|tW}%0)`2?lU`Zx#%1R}xGF%Ks_(a{cp1j>$gT^c2mV3nhIif3U|S@qn){JY;d z97;KYd~Z?3sGw<|A&F*XJ@YMw6)bB*C!1=Hl4m$l*E89_o|Du{d+dWM~h>2oc6dCibyf zHR!;%O67A{%IxUy$E~9}!6sp@hJdDq57uqd1&A4w`;MLhWneq^M|rP<%sNmqx+K}DjI1ukqYTNC)ryRS?di>n@wAroz3MA zK^2Am&=aKQ#Vod|&0b-DDrrfDhH^vO-Hl!4JRvYAPS$@hmD0_mfpwXgst}U^)=Pw& zjFp>hfT?}S(~R84dNXV)NTd{X zYUxCoW!+H68Bf7Z;yi;RMKT>|yY60NcVa;&J#snPN#@Jzi$%q33*Wdb?Kh`+5JBe- zMRKyJX}!HB#d(bYAE>k7*`aIMV=nrw3UtSP-B0r{(EdmlGdCCkM`bXyl7E1=`Ke3T6A7#I zDe?n@K zp<9O-6Xv4F<5wbPfdi(QLv)4Kd#!C+yeuY5R}l*=$*upgf$II`*J3de5x_{qebov&UumK$cntcsW1AZVU#anrmx9R$0>fEfU^GJPpgDfOew<%v!)Ag(}LTsxUZi2U5kqoOW9KF zpmCMr_q!2dJ_axn@>pIfJx=`Sv3=inno%(B20o<6ppr};?tP$zJImA{xXiZ%e|;_B zwoY{w_dl2ojFG~Z$MQ?GR~{!AnF&brI}bp4#~vX-#KCIHUGEdemEFR%DOl^`qFs#n zk6YmDxlkR@wH=*Uvd)CkU?!#UoXvlkq@VxpjNJe~HMbA`_5La)T8vpH!J4xKclo@- z-->>N@TnAEoZn<+;a?;N;ZpGrpBgOAPUN(z_o@^+%sx>+@?vs_I&BS{@q--=j4<%_ z$=n$iZ71AH;I(irbV_jme_8Q|$4bMB{JaA+@9Rk{!}eCyuav0bkqjaL;0NKfvxM52 zj(Qv_DUV^qvEn2}2OvHgK;OjBEph#U0TMA7vuw6ffl?wEqjP7|Ag!V7q@Wb`Zp4C2 zY&%j*u~V5^p%G7h7JqCGU!WaIi{a^iu6X=pH%@=6)aDIj#%_?G^MyFH4SuM?&(LGd zi_b^(XXhu@n1fNVeB%h+|dPl zCfdMKZn=3IzM1D+(wAw_9VD)^u(&Agi|%~IoS$o8R|-9P?9ewdFa_uX-cgCzY$}gF z2RD!K&2-XpY*gFbmb4wS7`;j?zYAVt16PY@Nqx|I#H78BYs z2+aq3lu?u;<0bke9Gsq{y43B8Nk8e^yQoiHTT98e67+@EA$^uv8zM~6;Qai7yJWVg zUl80(Oh@7?ml`XS<@a{|l5sr7UlfvUy(Mf+_`0)3u11g~csi}0ttpYm3 z?G~+Zk1(bEe`7*_xIk>r>ZlcYM&uq_OEKc0zUx!}{sI7*rU$Fpu3eYzj9U(L(rK|q zX&(;>)vf$QMsR%s&_kc`V2(%5lh4?=p)$5d1>Aau{8GgGZC3j|To~CO7xaYRgVkDi z4c-eg|A4MP6&1mvN*4YQA)VoZr|M!B_FbC=o~yu zPN^UK!VR#$`$EL=Ud-a-%Q}L6;9r3^Dk2Ulnna6W{zc5c5e&#GioD1cU-03s|E%D9 z&GBam5NX;BakZRCjx`p;tOi2z3ge+=(FIj$wdz{QEWhVb94{hg)22ghz7Fisr(56v56TR!X{^!u#pY`dYj zrpArS^s%O0ijVg0h*~*7ySiCJf)Ai z-9}G<12PFSEzB1Uzn6Hne&JAb^IK~KcKF-MDZ|__l@g<#>UJdQM!*oaAiFD3JnBY? 
z1QDu6(oxM>iKNegoHsK9NgRI_aQr`#7!Nfz{^BsQq&%C;XJ^|j7bMBUIx{#UrXIU# zmSt8Wo)O|kvCQR0AJ_$nf6^trwJv?-#=$`QnJ1Tz%Q=O<63U23@9v0%o`M^$ngM1{ z#jL`wmbTuX!$E06rq{0Lb2v(NnqkQ6xtA%dhEmknJ^$pLQ&*g^a;}Q(1DhnDYu0A2 zhl*Wlt#wYt3o;cZO1qtk(=Ax_z@P{6&v4oF+|3ykp}n;)yrXuaWo25Eg(GYuD;L)2TW$_6Mb#_WVE$G{^V=fLZp&WT3TO9@|=6_Pqd3p zr<^}cq|)-t2dEH462Jzx0-k~U)USEvKR?{?+c}LH_asg})meR?TtoSFWg)iebnrn8 zU8w+F?GWko$qLm;-xcTSP)t`{DWBYB@h}R}UUxCqxs}O5ljqol++$nPyNDRbU%YYB zJiGV0Pv(O|zh*P)bf4JX!&!7VU{l-pPHk<>&k_Ub{&9>xvor>Sd>BI|Hv zwggdHN-U=Q6E?v5+_2ElQ2TstyN2ofV7;u40ZBKrynSh7wbSsQ-Z??Hd+BIn5;#^r z5QI;}UaCB_jy+wl?$`D2jkG!WI$dtL_kjSdNy@gZZjLCTh>#`*wMhp90%5^>Q)X8X ztCuCNmqeDONb-aD_GOVwQ>OV_IC?M;5c-_wIws0X;C7Y_gOQZX8wx?Bi4$uii6Z+G z#sdtlNdebcwKfk4OG!rs{!?kaRJmX~hoPj2y^Eho&E6&F2yHHW?;q&)Of(?*26wHX z=Ch3vrz>>wXqwCrk2dP_S*JoA_f;%SWMOmSWTsl8*(?m<<0@)M~u4z^ouzBU;I-)0jGf` z%4AlLgCSHUQuHP)kajs8+cM^^WCt~AQZ+9bW z9T)327WLeF46orr?27z*xtM5e|NS3g0+m2gV~roGi`d=PPtckfik$dNR78KVCeT*Z zf-YhirgRjCOyUgtg7zcyK;6=!x2{=!DaW9^{G{pVnO4y0D7pP!SM_xMlLd<=IPpl{ zz{>L7f|Azp5N8-SzrSdtDQOfGp-42`X%C>krd$eUxZQb8#+x;WIOpRPuZYF?rSmWV zl!ChVqs(juiL?I*@Xc=&FG;Lcc=`Hd=oNi;VuWbF&>FILnalYL%7X`%Z&-aG9=E^G zwO0}|Z+oAFOb%-27zNlS<;kxqh}$%B$LnR8iVbl_TH`F|dMStvUqW3;pkcMy5lAb#Spj9#(?}>d(bhu)Sj__YlfE%M?V!c%5NSG67OD!77}wp^ZL0pMsr7bxnYNSf^0djW z@4AIp57bvoE;LQrBK2}pH%9MYJ3ET_Z%lgYWdW?&Yfpr~-mW^>qBR@MRUMm{SnZKg zZ4*Zrk@0CR5dC_|vOvtsz8WlbO?y*DonVvw;A&0OFP_u|{=;m;?ufrIV2jAsZ$*1m zqGpVdF7+pq^q5Fmi-D_&=wtD zstnc0PWSftRg$;Y#&KJ}owzgHs*qrCBG4xBEM#{TJnWsQiUpb5-RB^#K$c;~4YeUb z{Zk8(Q5V{;4ywLLQKf`^A;Wh^zCEhuA!qaCc#nepL0 zTEEkI=lRa+9$Il?P3z=z!_4fS1&cPg2&E-J#BHnI88~)MWly{so{Tv{(B5_ukKHD6 z(oHLrQ%(3eI-`3&xAc&JKc-c!jTH_GtP)wJ^MhAZ37b=CMFnRoCG9SF`2Pj%PKU$g zJ>)ZC#^2#c^d(|5{6Kh0(DKh#!+k}#FmZtGeiW(qb?;*8K(&)c8TIp>RLK{Nm0i_U z9-(&IC0)+fyr%+1%_l4m`Jmj9i^*m>?eL~1%I-jNSK!Cb9SA*7uM>-3dd0+>-eJWm?AVUkI zq1!YzLdo@I&i;GWoZ&$rRTgFI@hOpH{em*pN~oU`>|dNBpUmwtRZ>$r?Y_JP`;BU@ zQL$y@f^MVYOrC3DAjfsl4yidwrCKZVYIF(f78+q6a1>SFCKkVBi7u_}zz25ab+$LH zn{t=AyxHXB)~B zs}lJPfj4+CV?@BIYTV0<39N$P+fWar-Exl*7E@J^bY$sm&mH@0lU5tJ>m2>EZ2SDg zm?n?7z+j5PVu4uxvMBmDy1_Ms12css=qGi<7;~Puf)Eb}6N=d0o5FopR1xu$Cg07a1Kq(Rm7oKsdzX zA1}W)%&*g+Zf=l+Vuz;JzkML}ZW?iDk)F=hWcW zm1QVHgI7w{Xqr)*xhnJmAH-CdQa3zB$7cmw&-EElsv^nZJkcnHa5b8%{;D%fd|(5g z`PyftgEkEtd>{Xx@ZSgZp5=sNp8G>?!H=ubbQXCd{8rUJ&Q)#AN*g3|NF8tVB8eWv zh4%`*l}%Zgeq+rq=fEi_bZ<_6^K*g1>ATU<@A5N(y=NzF30~dbxnJ47fCD}*4-GUD zj4Rpv0=R?KE#D9YfW}-7)UAF^A&9U4Kt>96c!`}cp<%DfEE$m!4GEk0jDqzHzJsHq zEACda(G5F&6Z-L0Vp*rkZMvuEHB}l#sayrF=E~-fMgEVNdPbxP!8l9ao#_Vmyy?FHS6FFk0>6qd_@Vr6sKU6<#1}TP7bCLv~mub#} zyFr>hExNw?2amBtEv5Rk5h1}~Lb$qUan#>pX?@m8SCR!xliWJqo3qlaio*32xp0q& z!P8#N*xf|0?vu9J-lIb2>+exWLhCmUVl+zX^4OzXbO&vzwGlDmk^nlQ_VmpD#0D01 zq3(?QoZPNt&o@ee9-p#|2DOLQy@^I_8*s-9NCyQHN4~mOP^}jiMP!pQ^&9=3H+=P7 zo0FIZdv~9>wr7du+<#vFD5!jKoRI&He#AY|YbYe1>ljV29W7d%t;mnZY69*e1-waV zkS2=#2Ss36$cwgB8J*5U^%mUZsP2p;T{vtym#`cUkoCfG>a;{tlfrNbA=U!}2UlU^E=o z@(QUT$lnx!Mym{^cYzzA8&dy;Y@fz}qwgdMq6&nQ)un*Q;a`(P@vcFIeL=GAi5LHD zzjENsN1ax7D&v*4-_XZ^ayXD6mnD7b>`<83`k9>@?^Rew+~*er*MC+X>$T{-g7R$d z^fbs3q{yLd01H-xlLp57Tg2r4`DQpE6545$QQVqTbX*pv43ycvf_O87yjamJ9XHD1 zcPW%H>O|RP`!@?T1-oBNTi$vm8$V5)xS^E(VrRv5dB6^`tj`^trDf5vA}9HhmTZ`s<@L7%#u zSvIHqMvOT6XJDSwBO&-J5t=ES#1D!ZtRw^>v8FGscjP zB5v26aU=LJ_T`GKo<pErwmiu2-t1w;A8p-E|TKJYed5KuG81iXFVp70Hq+%OH49kI6f5I{kLaMSt%4eQ?kNlVCBfi}eMD40}OU z>jO@4XZvBYS1Jc#6U=R*=HLa5Dv;d-Rtn)F>Nnz7>A5RGJo==Py79)tSBvITsH*T^j!KCd7gJ1iX>+m98yA{;w!N4sIDwFj^y~A}S{2=sUy6r7Fll0O1)MGhUGDmc6Yrj|s*GN!65&+|&cG?fF zhS=t;(r&8bshtYFp3i>;g7uV9C&!-;_4NV2)dKAjK`eyz-%%g{RR=cDS|YCadaZBP 
z`iHK6ZCP{A#Jw9}R8ST-yB*{VJj`<=6C7Djv8v)OqD(m1dhVRgT)NR4CiC_f=~dTo zJFXNF4(-1NN6-W5kYzS_*!tA)2~(KAUaEBn);`~!iHnExr?Rqx-vkb#oFIK^4CuDT zcgy~N=oW{_!^1NumV?%eBOj+fti4r!gHB@oeaHN>LKOnlbPLz zJ3&9a_udzNn}fSEdZ$7vKT+f#Uly$sx9A4GSAl=019d-I%&F5{p3Pu%A&0t#X)26? zqyYVH%w7M{@@K~N+=qDd!na^tD8NhoGC+a9jT)m#Vq-w-c^LSd0h}0W84KfUYQvs3a|IORC zAVZkUf$?Nd_oJqEJc^nn|0dTsa;Z@XwOi}ib8{?IGR!>3?l`t}#3~t!w{IAKam?D! zpIp(i1ylph^FKV-pAgJk2uI@v;$4h1`a#SCK%$Y6$3&@4R=E+vYS(G>#-X>o;C{ZM zn$Rs6GaNOU10;BVqGpW-IPh{_Mp-564~2ZHN1Zx??{mJQ(Ge29F$v=Hd?FYJbyT#e@&-`fuXqKrr>V=qo@p2WV# zoN=)5ye$6MG1&FRZ127D@k5mN6}N7|;@|+4AW%N}6QIneg*F$bv39)ms0XDdV!rCd zk8aRcgwejT$JY7o#ZkLC+Nuf#jto{z`Rw!xmlNW!#z+D!4yuq z`Gr1Tvk3+d`1bGI#OiGf}=x44XGmO6S4N*b{ru3N^9lgjt zue_=4oLk?*QwP_a@%N9fIo>dHRj-rOx-~mh4n=WqoUy2cK$%LR_v8;lj)?Ai3Te;9R zpd>_)ljUyI^kJ5rcB(k~#Y0C<+{Y^yO{GdTN`aw!{Nk%tBl&H=_7{m4E{lkg1Kz`( zwm6lv8ih%aV%xh64#2kR!NW!UXEdc$iJl0!7#VIn=F>2+{^a5^Je6vDlXom@$7)A} z-!JLKFGvGzu-R_@nLK9u)ZxlfjNuh)w8uhqSoiSG@1RiLLtNRO0(cNIx+X0f(vViG+tz3dW3HRjINPqMNIQx_vcb<4D@}GtornWw=(C->;a_4Zfq~9Z_MFjQzCI!fy zc$_~I5^=9}y8P~Y7U85~OWdL?5j9`4OX8&(4<5O}-ZvqV)D8^_q!HpkVO9i6(u(CL zCUNY0mAG_DA8wl#GxluodR+=W%_GVYUm@Kd7F@p27hKfEA_9kDHi0o#n7wLp2rH5s zRV^-&$ZeCxfMjlHGL%T>nV6YPQ3o8pxs8BmO5J$(V_8Wi^`9IqME4_FQT)w2Gp^-%$kl3NO-+rz1CK}2Il*dkfoaG) zuZ^=lm}!}G4LA*`Or45n5QcJJR^E9(A9fH-uH->D#8IqoEnm;=W|O;1_6SiAUL)wf z8@K;IeEb^fb`n@Fek{V;f>W^|e#`ApNGL*$gAc|~IrOn_&_+hruj}+IYkv1> zx&%+Lr~!JXnb0DY+Poh; zA#%?SqD(lxZ%L!I*F8X=0>wlPqo?P5G{E1m1mHCD3%vCI101L%;%9JiQdB6}?0fbW zSF)e97y>tp?1rn7GaW5QW80|3DK;*aH*6tXu^(EQsc9`f*ROU#l7Ave&2&}uLy$Ui z=*}73nUuu{|CBYh>Ste5yKE66R4W@GD`0#g(_5ZuYG&5;l2l9YCCPiZr*1Rc@#t>_ z^OwE>AxmjO%^8{Jocxc%mIrR`eHrwBd4@y~TiI>bs!At&zG4(Aw7mVOY|@KC9W^v7 zI@_G4AOpt;-eyAG3$ndb>`S|JPt1O}B+3vXR4J~{q2AtloRN2xEg6RL>-$$15L1f- zbzz9o9)p7!0FPTD2w5TjlQnZju;@hHPjZ_nyhGbsB)23Fel)pV@@c}cJ!tT{j36nPvL z-waq&kUj7UXVsq28X{{2ea2btm(h#nS$+2e2A@*8guPAukn?@=6ykb)As z5T)r*E3owCCTl5265CO$_-Olf#J&2!=U+ZJC|Ow*$bV-P8rkoRrmIM_UqeLc^m$rX zh)CWSF|Z=0@{{xdfF&&u@?CG|W3#ti8jhq#Y2XY+-fFyI-*i;PW-2v<&8{#>E+CPw zgF(QJWA`6A!3a5LY2)9KGwqqLq@i! 
z%{RM{cHb`cO6V`#59U)ANo8BeGv)^8!C*t6O*qwn7ZsvMZ?lN!S+2I{Y4ycSWQu&Y zUK_4PdJdBA%uk_BICmj8ocIqra180DU>>kZ8}Y=pXm-7koKF<}DLq743MZK~-!+0% z3^_t_Kwj&++CFZp0L78^(+okp*70Uc`IF4#zGUl{FL2;;UxwuB>N<+A*)|&EX;yY7 zCUZtho3z$B5ijUuZ~qzl17{W)##10OLhO$b{}f?pGGghh+?SN}p*-2U!qh5cf4L0X zkggbpL~fNsr4FU6I;;5HGPQL#0M1@y^g+r5-rc0W!0!%j`MU3sKcBCJUOg59$`j*t zB(%wfG0~~4aKB&mCm!paQsr+Am@aFJ)n}s5x zk@+KGZepUtm=>F>2y`s~o+E%r^PP@sFO^wby?LpgNRt75Gn89(mZXq4+dL=U!|Q&AZ|A+?u9PJZ8#)5Anme;X4w7n+l!3{;Zh7W z@WSR+J=Z`3=V$2;>Ns*Lf}_t#`aTY<463ZFz|-R2G}64%>@P4M)jT&!tfpMf8fE0y zkKx;CPoJ`l@18Opqva~g(D#aGY zRa8JtL5!&-&^P|z` zQ_)@G9MJl_EKmB1s*#}ZLKqYo5lHBWiFkvI&;-$2Ydy@jEAirshXvB5<>f>~K|Ze` zuyDA0TZ};Z_(nGWV}{pA(-=my!#vYRzf4z$qv^^LKKpqsH!^6OzK&HJ7es+Gh^1Sx z@fp1n7PJka&jPe@42#!#oz-_`2nerKyrltXs*A0`@l*I5W)Q*n42rLuR^()cIvPE%pF*Ucx*2L z2k%ylN+nU@y0>|1Szi)%>8R1z_w#a27^kN5rJz}c7>I0}*6FXRmIYU^%%N)lC5>O9 z0}gW&X^E`6UGm|G6gu5*yFKwed5-JN-fEv8A!0E#62E>ghzJ1xZ*ZVlNQZB0zI_b- z{te=4;G>#JWu%7&#~Caf+1%ZB<_T7(Cn*Z`Gu_Yfqe0sZSDuVNrEFjF@bdDTiKJ`4 z;o$1fj4D3ylonixjAU|wi{b)muFY5_&>@#<>ZwFd9&B%+Dr8H``l* zTFr!KE{h8>{st!Cop(H@KYcp_%oU{_$$Q@QK(YaO_(fVyj@e7vx?&=aS9#LXt3jXe z*mkbPokE_(%Gi@~h?<4%r)7)P>acHJMx-`FH^T!qX8JUrp?ypsS?4RJZr9 zV<_M$5mZD}%hjlAQjhTIQ3ng8#1c_Th)CUYEwel7=4U=)9L9to7oZNzUy-It;9vd; zCRVs$=^}(lP#?LQQ2$X7JQJJa$B$q960tnp8#;)%82YS&f?+x z8?57?`ZnO-Ng`i#IG!Ozq21MOHF+*%#jWhUi!si|S3lV9z&yaz=X?eQHRtQ5tHz{v!=qFwXB}U-)-|R%i!HzUJa9fK+0CVvm7EYUn<+fs6Mz5z+y@2aAFH4I zJhG?6_to(619d)enmfE#35~v38?^bw%ML8PXD~AJ8<>75?{cQ1(=0m;ak-897>Z3# zd{FU3%t(gZl}}>vUqu>roHrc?Qb&B-Q|ue8teTGu%iHPijR{lz2ta)5C2tueV=PQD zo#1Ek;mE>8>hOrSSCV}*t{0A847C0Iee;OlTK%RgTG|{-#J*g^G`>c(@|Tjmz0(uV z+(tX}_;50QyvGHM2)a+0TuyI{>Ma-v5{>D@7xI>huQ9tHGSU;sd;hF1QVBj3tew9L zdx?^(QuK+w;=xHg#YO8&N`dbN{BmFJ4IUgt$@v zUqp76DU@X|IXy%vh?q&&$Z(QL*A8!-gEOv-6Yv~ItM6hq-#VXoK3OtB{^B74sjmc- ztz|fl{8O1K?=mVL8HN)=UcAG{K^Bg+Csi7jCK-t(pj{FE7e;VYiVZq8cKc;2_M28N zM|?6ZK%p2PUI^DuZ)@RglN;5tF8TI{i*pPl&6FqjXd zcJ+MEDt!22D*LxQ%$qT`AX|rx${Ee?uz@)+SO%fIaGQEQ6P zz!}03VfAvW=tyItba|E%LiOw&199aMPu?Z&Iv#qkuZBrs1f&5Kg!%ZH&|h9RaQwSl zYMADak|C|E-L6edg~|vgG3z#NGb9~!Wy{l%i#yWRTgl1EDbu3|cTuR~6xM8~yoGn? 
z7yPNpunBLv6fp{+ps(iLRRWH6$ukh*AfO)i$zD|v=tMn(w^4o*gLc_(3y<;3XUMm8 zE;Ol7Dz_N~@6<0p3hQ%OaDl4}d!*gT+%kO3mAivf`h_Nr zU|R;Xq%3q4@}WTPXIW8@_N3W$jV#$Snvj68(3ElU6~z zn2ycS{o@?fD0>8M9ykOf%vpC^)xX<<)>#=EoWNa+UhOvizFT`kOCRS`?~W5m-qaps zB0y5#rGj;~LAjKL5acqr5Er?5Pp9v{diE0TxBDoi{o=uIHL2nkYz8{nA(R`|-+FS|;qnIQ&% zG7MaVc#CY363NY+vV-qmzeyy4XHVc5VN-hfw>elq?oQ;~PO(uc8pmf|(;&fk2s*S+ z4+>&GRks$6QhLUZdcf1IDb9CqVKMZp(_hMF($q2xAV*2e(%5Ou_If%;))c{`4>*DF z+Lvry3_CfcTwgf?-Siu|-OhQx>Tyay0NhbT&Vx2FczhM@6HqWYP( zTDX^Nl7Uvkv;sI+4#Q5$e`dhvF2oJ_p~EaumkRS!5@E7##_vVm-pFzGYl5)2ByX14 z6@zl`66y+k^&7U8sRHTzv&Wi;)rRPn4O-lU>SP!q2%Cbj7c2G;-;q-R(Iqj5@LTrj z1Kw4jTffi^3ULO{65vUCyXk;yM;|yAhd61Ky3$d`F`tQ)(Cg>pwU3Drw9{rR1z!fP zO1;uIN672W(=X7ag>+zrXKh;zsa=q%s@@IHqt%`>tzf*DMwxKqp6K8sxS$s}na?=J zo(np~;+}&hvIv~tsa?{7UueC97m=}^fd-TpY0NUG36`(7b4s@gUR}XEjCMdJ@~MEn zDAkP~q)=;ASzCU2TOz!++i@ozg+te&8S&1@Xayzxl4yq6;A$dO*=<5{41L(0u2X36 zP>zymwYlGAj8$*LSG6bojBq-EUTn~(L>SFt(b1ifiy4sR&G+S1>J*D)SSj3~m?8sv zEh#)^q8KC4K(IMP1zKQV02jg_m`(L2Fu`y`=n}soJrtE9f1W<93w-VrlV?r&ELb3$ zY8O2~940FU%OulY_joB_f1;iiGWZaJiKn9Zw1bo5AeKBys7`21BxE|dV(-ZTs<-F0 zbN`dFe=zAJlVxq`CLhp<|2uW+((dtnxQ>N&px``N@rO&cyXE{`ejG7)sv`$TB^ekL z@>g&Gg6oUh7daX2^YewR_g2pT+8?i$7O5qc~%+s(OTAKkJb&EH*kJ8+*+XBvjW9gs5{d63YWHH(Drd8$Qvt2 zL+g*ZR32N0O46=NtP0ojcEgkLGVwt9MspWk*OY-CcnO6DK8`aqWm+%@jz9a|2wlpRiJ+lAW(Z>TWn+{z&AtNH!5)ZMQsmXr=l zpBxdC3j_84_Kx+)byQu2%_Fnq$Wn|2CL=p)E5&+-%n=bg-?b=pT|*56pMM+)UMl(}$560;&Po zD93#+S5?|&58lAr5(A3hjl;^;$!w)kOrXzr{^1ufQ*aP(fF%Ty!Ry{SxVC(e)}U~z zopI1={SteYW^pR8VXcsNv3~0u)WZXp1jtL|QjkPh|k3 zt_HwQu`$F6?N-;5I8@!~Hc%Px$@t?o7{p6&LV1tsjGB!YvnNbCbxZg9#?|6gq}mRvrE z%On43U&L`9a4CFw?=Z@>4S7v1IMpk`Tmc`eyr65mf$?K0NLcNB!bSrPYd=L@}+sU*aZ!_NWwW) zhM7iqmjW5gF|b>o(HWVwp0(&OUe_F3ZCXY2TiT-F=P<`u&>j@_Q44e`_1e**4WX?! zaU#6sN=B!~%+U<@yOTy3kzWYr*4Iwv^mk-|kwrM&fEgXa;0SOz>(aZQK%Z@e0~Q=P zoiu9IbGc@(K;DsV_Trr1X1<9<3dQUjGi2auz2L1ZXJl9U*#R`~!4miolO|!A_fue& z%H`!?gN)Y-mJXexWMk^!#xvSXgH;w(zJf^$tvUsPJr^U_R4c{|5%CsEtTFgKg zbl=T=gw4VSo9^*V4Z3KB=8+`z*?3ud>P+>xxnp7u8HKa7i7BRAZbenLnxn2*QsI4m z^4rE~nH3$P?9*|AVtDb$g>=7Y$wmnm`dtoAz%b1l9Jvv$CTj^E_Wl69X3ni(z&Ii%W0-t!;*}xr`=N9n% z8v10=pl|nbQG-usi)N|x1%{My>IuOpfX}Cbsox4v(#S~U3c@J)^Cai0nvNFMQuC(i z_}L6#$({37xCQ4J2s^b)=xF7OL7Ve1jM@yr9;|iuajI+S$arPZBMi-6Zdq2E0FiS{ zGxoFdhQnMS=ha`ORN(67K(eNLL_xi?3U)A zWZLZQcx22$fM~}D->feAiZ1}(@Dn-t3{oX*);@4t?{-X;2HHmIf;=D!a(?@pl-_q? 
zaTiQCY=!yx#o2+B4$bqjv3{q5e~#uopbLFCw}4~+b0aWy9IT^|Ht}re>@S|1jK4X$ z6+~(qYuKwN9tf+7>=(4l^hEoB?%=C!HMx6u9HCdJF zIhO9nP_$THbxOl!pIn>gB=DL#S|?Kp zcttMgW(W^KmUP75cO%4(JV*9dh#V%qTE22cAXP#7fwpVceXX(EzwUZ<)XECPP*P)b z$8X#$vGo&yiz=XTDq|q zZlRJlQ`T_&x!6&B?81C4HpUKiX+@ep8!bM6rkT!dB7mN9_~3z-Ef}@xJvFdqo=kA+ z5iImTfCx>714M?7>2}vhS06IGjJKq3-UlFhHhUShCekJ^BkMkky4&jpiG{=8{oBDG zQx87V7J~K30g|6)OPB(tY{i^bEu`{!f^;@SuQ^F#h-W@y1WdtZiHtN;E!|;-nPtK} z;!d4`!WD{#*|mx9qcRlC=J#S%17}E~D7cAnEIs zZ2qH4Uyr>+3I|$hH>zB})BNP7&3zb~BYcw7ImPI*R-8QA6NN9MjE#g#^G!HBdVFW39&ZgHRNA2RBXv!3p`6jMvSi2R+_v4i7qvF^ zjOhmQ2TOsqPnrpo6K7Q_!60oRgtO*8%cz?5-PI&RY{}82cD_*yenOBxmqNCE5s6)V zdx>8hzbk@wlgWa$lbE)c5H4m7az~JMb3-Yf9J~!ln7^1Tb0DZK1F2?^K8W5V#Mj^b zCGp@w5KS897lH~GNWD;;$k*G zN9RVnDquO3YEKZ=mONHI=FcK?hCX>AZq5@7qj< zq{vvnT~t927bz|#$dY~AFuMhs)tI8Wd_Eg+*U+9Ft-XK;jHoOyll?h7WCzYE@g~po z9RtG~@77UI9EYPUtyC&8(Xt#nduHyXi&DIxH1Eo$9Iq1>V==w)C6IppG>iP#_I#(- zi)m-QJPLU~?e`0QKJZ72Or>Wf@#6;awCY(mK?X`rFb9hF#PK%alOI8vU!7o!wGEwx zZME6zyLn&Hs;}cI^`8oshyIKz?NE*_MXlAxju;oVYJS*a_j8<1k^im~10B?#d3v%0 zzS}XRVjF99jqZiY3y{vZVjUU2SY^2H^c6$2nWytCH){v1Mq3aU7cUBvHLnO_K6D5z${uSsWQs7l=9!vw-4yC{~0Cx8Fe4 zDq8!1K)vUhmhz`?nL;MNih9Oc9UZ&C z6~@7Cwk-`)hJZagJ-1&n4SrwYwM=*(?)=qBB^$fFYCML;t?YS|XkV+!&}zHywfa*C zW4_XjlR?%H?_F*t2}$$*0IkB1ZI=B?`-hh=Fh0;OJ$Rv-a%nJZd3)AQ2SMn2lX^E+IyZfr^8y3J`d>)fvZ} zO=QH*$|_(X7gRS}33^?8Cx#>vZiZO-@)G2>pv{QD*+;^G+~41C`C2?a)PYlYwTVKH z!GiV+PQ1dGGJxRFww?xdC`7g2erbqCC<|oQt3`FTWRB&e-N&))SOe^T=epW{wi!O; z7J1BUtVP#m8)LuO2kW%{ABT{~4t=r+uOZ;+AND(#M7y678E6&2YbX8NGRKZ@aEE#3 zQAqU`x;0cea&F);DFSS_0*xMiWgj&c49|gVSFc>OR|*!pome{LIq_eYnDD-5@KQ48 znbpGPMrYSAr}X}w_R-~NmtP0vbJDguKklBC zQxEv4gQ1ee&AoDrPlG$Hv#4>c0+gtavmeYkwD>VvIOn!1V zBo@UEa+yf~oKXMRxKwb}{q4y^TC=b~Q0=7*Jtj`iy0^hn?K#f;ZtM2A=kKh<@qG7I zyH;8nC-Nj#5*s9dPqmE?rl=Mf@={Vyix0Ey=lH!>AKozo%`lZv}CRSvyQp@KcAiBJ}xBcsIx`}SyZFbwK z*10=TFMX|D+;l$jfS#*AHELn-AQ6>Q58!k^&lP?Qxvc#NZVf%Gp+ME4=^O^EpF>=j zeO{OETkw~To|T*o&peLORVqex5#G--$%=Rp)pg_NjpC&y^5Qxso(~~zp>+p_^y&Fw z*vJ)va!ZkmWv>OLpX=~E}%d##^Z11}#yjaz#hdl}kc!Y%Z)`!Mihi@cE!!E--%wRkrEccwfFS&Z2)kp#lBH40RfW$a^m?H4DZcW1F>^7az9yT=I05`;m^pBw9f%Fiy_x0WJ7Oq%C; zU}8bS?&F;9(h93d3B3Zt8ek>Xu#UA*M_ZLCWpl0Mh!A!uv45rR6j6xG_yB!6IV@3D zV6nIIGg>#tk4^^01}W~Y)=LIpa!e3;jgTePh+aG2$A;@qOtC4vk(J814&O!XA2K~n zyQ*9;Xn%|wE>-GE;}#8s63mJKFCVOkdhL?Cb-E>Yji~n}UODAm8P<#UV^bGOT-(ZU z+pjZ2-K4z>Qy0ZD?z5LwkrO7Lxw_Uy^wfYL|3-{c+`f`V9BVXV=Tp7q1ksDWqpO!p zsr;PfxiiuwYg$$!np~RAnFBS^N#XWvV(5un;1`~pJ2u7lU zl&PN3c*Ht?l11ycmz8v|!jyQhl2Pcdwo9UiOnSD@0M6T+B}X`kdKPyoJl#JX!Y`Hg zpJbQ_@)@+<*2#i%&Zo2JGmM7WGt4q*f<5CISiv*9cf5_S@p*vrq@4f+(a+}3qT%ac z7nP>ny5Fo8fXbZ}&$SE+FbX(}0{*@y^58gi!=BqaEXuR-pDPPrx<((%LJ||;4;Rli zrW+o=i5Da-N=l}WA*VqayYF=WHt$2TVLt7n*-m!=hx>Kk$<7+V=mL2WTau*X*e?~H zjRX3EFW>5iw~*O@Vm~J2?}upttV1awsdC;$kdc=A!q{9@hWig&bI0CKw#fbT>W<{o zzkED6=;HWc=qrahiUu|uhR8M2u;>5^CBHQDg2M<-6q{UQ1ADJodw$&Y99Q1HAF^>jH@a~t)u+P zgp1>bciE|H9Kvzz#JcFR3zr`PclvZF=e02aE3=EbBAQ7Ys|Xi?2h$wmCQe+U1UBicu%dSJWS zk1uWwaF(!l0R}ar&pZ{a-yYk80U-2E*b3aa7zp(sdvjy(O|JNQIq;<=0P6jBIsJPq z>~zir+@F=>pbmP}k8P#&2Ko2hfR&@?5#Z~Bg4*I)`asvoBI5X==62e%bYS#N zM42KaM$blWAlsq4wSP?J^?gm>c7kX`= zkwj@0V|ky5i5L3&K(%>b)4Nsr=cdas>nb@XA3^(dP_?mtg%K#Hvg_X2n36CLLshDz z@2D6-3m3Wi#34G=n*fR8?cHlR*IY&vPSG<0r5J9=QTCnJM~W!3KwnJw7zeNQC|aLG zPV*f(mqd?MQvWo;6H{iwHYuB<#gdp{841h+Wx<)F`yL8>|0F1blUQj;%uR?pV4gqp zT!kO}U7di}JbF35kbApqv17N&?^jg95{Z0NfbOxv1(okM(X3K@Jcbl(6os4z3!&U) zC6xvE>C(g5Fx%>>syLwwIvVHGuJl=0i~zs*_fpmq?!r*Rq2i? 
zUz5>~#6_?vFn*2Mdo^?aHg=H&6u62PPR1Q6oYQgc5QjG{V7ooULg;c&#z=n4$o2jf zISSSg87r(=Q#L%eM{466p{7fE6`$}c;9Aw7?z59q%sE#G&!i78@p&>hzJG5ZkrJ*)yiNoDA%hF7_xO|Gg)GOioTVsU%4nrwHi1I~uvUoY8dZYVeGVg%tCA zaUt91wmVD7XnsHAYyHeP*bfDPy1x_}cIDrGkuqd5r(YIzLwprME~)HHN(k`ykLBK> zQia;@)b;5X@3jNUAy=Re_^{i(W{b~@kNI*MMD~(KK!pClx2b(6?T$fsw)H*{1ww1S z{^Pn6 z398uV_!uw;^j<(Hi|G<`ucNrq@Gx$lEMDRji~1vQt#)B5djD=QL1doO)!mvF15x7m z@!iDPM8;8=x*S&V#EciKB$PRa~w^3a#7InS%ok)pakoHZw5Uz`Fd=4`q$uEO9{Ps1d z2lF)2nxK9M=hfX(t-+Va3|MYlGt^f)lNIUbHVUWNh5R&SGe1CF9HN=9d8pfNjr-n3 zy9iWDXXZlA(M$GY%xTO4jW;0t-Tbd3=#I)X76R_;eg=z{gCa)i_@YGRc<>x~{*&CX z3m4pKBOA`x`u-!2HxA-$*;-1R)x4>!;6v=W_q$=A2Yu$L$fnc3J3b4CFX3!=A5phY z`@9I1?DNLqYHr_I=#{Di!+s3EJj^r!U7i@U=mCv|aTVG@KX1+lciDpAYOszzOx+%< zL@J8k(RQ`_%yD`9)9~k;uOOgY*h<$xx7k0oAloUG?J+Vb4zY6^vkWO%ilB`Br$ZY6 zZ^Jodi3V}P6db!O@3xtp+ge|5ego*AF1juBGiQf`0P%Z$o_oI^vO&MAWVWt*6-mr6 z)b1qJ4CS#&)ss-5A8=$G6Fl{6nD7wf-3SF!Q9f*__xWsM236tF+;=hD zXZbJ&u)R+qF7LW1CN%TvbPXf93w7${F5rh*91pP{@|w<7Ca3u`zn}1MdNtwEi|>$) z9nwgFgYl3}&1~{;mpP=ElYg34G4%Gz7)NvLzlt->#vsd;H?xb#S<|HX1Cw3T5IsA+ zeichml9!P@ym<=0%0saZqBvAcKeLj!q9tq6sgoCcR zS&hc}o77fVXYk)es8EGZ-^_v^zT@mb0h|;nZD{IVFeq52%Ad3oJLs#wkxWb%`aX^H z2dg?OR`G^%yN^-B=Es?ujBP?v`$5r3n7S)6^es}Ldi^*sSry?fJy&ZDh;vDh=)v*8hc~Xpk(e zuJTgiPOQ~kUFc1s&GV-Ue)EbH2}-U#DLKnE+y)7=f2uBGI!wlz%LS4uzwV5oDa04u zu^PAV?vM14Pn4TBsuh|;yl0R{D3uk@i~Ekx;@O3<)2Z4d9}V{w0~9q@vBcmWuQCT zLLJk7{qRAi3(}~pTO5~n(Oe1Jj%Nn(^vS^WW$^P{_LyGlUY+!#dSUC9ayoMx>~Dgb z+B{ZYbRBh$LN`MdgD_jR1oE1S|BjpINfht=-tq2};Z1r-J4p;n0qIfSS()G+$7Obm zKF!H$|5@5J53ayffwR^|a4^!b_dnnser))ZfGdUEuaq#WoPHzwYSv}6f+s--w5gPPkYMY_l|q(J(b zsKUQHb_AOsr~j77YZ(knzP5qj6@RuxN(`ETF@!}I#bDarHs0bng_4S1{23`c(-?C8 z%yZk@FXZAEYQp~+R_W*-9sY4~P5{GVlgN&2URUK17dqJylpd<#Id-=pyOg-dw+;oJ$yNaf{bSB0FRv5-$4Y(Wa3^a>f^=lHFyuNT*h-s{I+Q) zlM%VzakcPq6loQUu$W@am9kj2Gw2)3b8SyGvWRZd{C8I<`)+;D<;DKM)|b?M!%)Cy zxSg|Sl*j!+8C2TE#{aSQ1yiO9S+ zJ>2o^eWcDRn*dQBz|f|TC!G~S?|tLE6hDG!b6Y&jY^^{cv6dRSU2ifv)IJ%$S_p!V zl=#4QD^P^T4zNax7@8^d)E5 z**k9n`!43a0j#Co*J~yH?uPGc4d8_x@h}b2V&2Whe|jG#s&FRv)!9F#e6XDwxrLoH z=Yr?J>qG3ay1Nx*S)DU&42q#YR)o6DKpHtZowq#N9^(gm`t`5JqMhA}269t+e3BZW zf0jkCFp`B&4EGzl@!*_9Lbnk{75hY=RN)nw#Erxv{_6UaqOGa>h6K4D)V~hCU~2Hv zdqvVf`>M+vPR!^b&Hn%gO|@{Ft{3ysDZx7HVJ)Qj+VDbp`U?MHpt0qb{oKR~N=H+1gVWN%@swrdFyP zQ@dx>Nvg)cLHE|`-j32}{Bg|WVO4z%z5F--3bG`@<-fZEP*VRiS4wKz5Wh?4`|-Go z&zyF=W0iseviLR=a(?CIYHx^2Ug z<{eB?Frm>cX-kiE#wo8C%+W0MTJTJ;*m0*&iY#mn4|w@gVF7=N$%1~!3Z^7OENmFVjun-Dfd4(7v4{7Y&dcD-_g=Q@N3~^$6Tu8J6MgA>n>E(N^?Bdy7zv1M*P+;o0&bOVbQ2iUq_CFkgR||hI z*1}lwOnIQq*cD&tldy@z3S?CSsCV0lXt7w;$&p}wnVCUk9S}D!7N)yW+!Q%~QTNrD zIErs}=Ngoi#5lcj!bdbphk+EGP3{ur7`}hJWZ$W1k{xlI{WdCoSsJpi1RQ^Iz#@qwSiy7T3ubLCDWsZ`TV zeyg-`;8FJgluYFRtxf9Fy$zrsL>K;wZRkKdTjLGel|P<6wkvPpU3%)(L|y*0Yy3Bn z62^|n!Vf}Lc4+hrX+A2z*FqX?`B+uwgYUb0=hVxdy4`p3vW}r`#a@>C`MjEmnJzej?mmO~)|#YKfhv_w*g!yDc2 zqTgI04a?lw~I?|5f{zyTyo@lfJn` zel`U?SZ5ZZ32cGzC)iYDgzILao>4;!CUYJXobn`fYDJy$%3ri-KkmsEniwSQiCP$l zQxXJ!EkL)VU!GYSBu~-f#I!fZO|r_-M#c>$e8PqrVW~73lrhPU6^gd2!#oaWipI4Y z{vJfcp!4|wB4;Inw;{Ch;D}7YLg#My^WU0PUd0oRs}s;X_!3lE2}~zC%GX>F9k64= zx@%t?no@pe8#A&GYdOxG5(m>{61bdPQ+@}sKDrkB(rUvIW&|JOjjuP&$vYj)8Pg=7|nI4TsNSZ+k4XYU&@= zfO#^bK^5N5rcRIKJYKvYR`bxS{F|c#4u%^P(f9Y3|5YYQ!ag=)dY^@V^ed#folq28 zZ@Dg#6y{aMzH$G&@5b#v#ztLnHXb;UuIX*n^cICa*xfBE^)bc1EPP4mu`c%O2+Kxw zdzZ-Px0dXy>ikb|bh=b;1#9@%yr~}NTBRy5JMWAzD07^z@F9p<3xpHlWM7#rbveIV zAM!-@18;TOK@xC^noV54so5m^QHP>UeEj~l@iLCi8Vi+5r_cMl1SP+F^?Ux$yaWUu=Dzs%Iys6Eysm{$oMLQ15beb>D@^$Ggujj{E*2tE~ zOqcSzH%uJ-K2O(d&u|FDP^jSCe8~O0+;6weVJJ5%A}=cLTVU3lyRvapw}#8t=NIa@ z3`Xkdb+^3+xz;-_3Ui<<{#pG8IjQ{zr3PizPo7Wq{VxwZSir!KJ&QnoNUz!bR|Eut 
zfE5-3H=+`Z3oLLsiuo*z=cDL|`c?4KQns*KakY9||2!Gd(Uk}(s&+AREzz}3DzcC9 z8`$n%!sYG*lMOXr@+ozG#>JPd-^~x5-wL}&H?-aGL-Bl=wZG?MIr@>RD^jyx@RZM`g4Z(eaix&)&$p&>P3ndV0xgo&P?{!^_!fmC40Ti|h$$`Jltf-ogwrlR07`q-M4^J3uAM@P$UZY|BdLA*rwJM89c8@$?t*o<-mX95Z zp|B-d_nl0-NQ`Y&Oj^5dkFung#UKIf4ktnUGU#7_ + + + + + + + + + + + + diff --git a/website/static/img/agora_studio.png b/website/static/img/agora_studio.png new file mode 100644 index 0000000000000000000000000000000000000000..48b07b877504267eb302aa7ebf9e0b49540ed686 GIT binary patch literal 133985 zcmc$Gc|6qn_y6EVU0ai)vK50CTiN#((GW^TBufg(zD#z;Z85G;BgzsjC}Yo-A*PTk zO4)ZpWXZl}`<{2*`?<~B-~YdP-22F#Uhmg(mgn<%&g<=lp4Qi9|9$uGC=`kvds5R7 zh1z->g<>k%%naY`d;B~Ag<|-_{`m3J9){Y-_FzvOmp`N=FE4Re@(}#>UHsi*U8bYk zLo^1^pS%e|+}vT$aYDOaKg~Tk#aM7s$Xl)^?)b_67bQn4jdR%d5#=pzu5ar2z2%Qb z`~EQbv*1bit$8OQ&C1fx?^lKtW(8zisAwJX{-;k^4DHWy-d?`fUhGq~d}@b8)7jc1 zCUW^{eOHL21vl+jeizkinGM711sy5RA0IO-y8mI@w?Wn=ow3r;Xi&flms_ThuvagZ#sk!I3 zF%H)@HWr$R^FFN2=IYP=Ut|>NyE-40bTCZ{iHTPEV@CVsNuJqr99C22bB<@m+Cpxz zE0%~~z00rId!vO#A|qthu=u4g_w`n;=keE59iE%+n;W-F8t~hkS$od>^@Mr)E*R8^Qe(`!gzOgwvAUn@ARa_N`j0* z;`HrsyYT@V?oNxhxrYj?+nBj7IWn^qzFxX$%ZNSnGThviS}9G`dKbRqlf?}o=jTQ# zd{dabedC_Nc#`@fA@VJvzTZxr8>!xeST&1`$%##xjMjdMAC$`1rhL~J z>|QOZo1GsUS~Nw&_Wpl=98-^upiq5XO?Wjvju11U{-LtT$#9~XaDN=JEk4$aOx~?s zX?dcuNT|P@Eu=ytoS3~B5ajFRlqq$NgJ$zo-$OeYP%?cJC6ieZ0&2cKIGn;Y^F>!% z^_}Q*RqfriGD7QKINXM~eUjec57kw!er%;cl9F2;tV!jHuKL=B*{d{g3PX z#f3ODDYv}1iwlQuQxxw1R$u zqTAK$*(@|qQkA8_PG>LomId%|hE&v_5{=^2uFTS-j29ALI!W)m@WQFHFnfKeS|iXc zDvOo&S)aqmvrn8l!^EO$H0tY)qjLkIq{?j$$99$G=X+N4yz<-lHa$>n;4(8$drn1` zALqQS^)$@Ff4VA&6dQS>((v)^l$y)#0lkao%zZ~vHYQj!mX4l#&P1~)K?d3I&1U?o zgEB#4GJ=uBFK?8t!(;Q852s!G)-hkeC8l3$@{;zLg!@+D#^1?fzFX0ec0J4k}L4Ml(QKiT_EGX)ej`E<5#Inf^oUO$f%9oVOLsxqjEs-xL8YF1fS$%R5 zg5E?^ukpg%Q^B>bi!!r44HDUx5652cA8kr|2pd>_$0}7iWj!K5`()rdh%v_(x{}40 zU*_k}nb@lHqE&Pj6JA>#jxAM|DK983UDeQks4Tai|2LW^k7(kQQ7G>BEtzKVb^eEi zEO3-B;@`rE&kCKp&)?^u^;O?9ume?;GC>(*#^KcX(2JUtk6b^r?>55WACbu+Iw3T3 zqo&RxzU7(^*ch!_)f|cNh>pEpw>pP!H9vRXnReWM}v+;>(o zC<&a2N*eDj@tQ1G4_sYJ_GF`3fhv^#!^Da{GlRi({`sEOH~*@u)=yORaT%dS0m2D8 zaJ&5LE3zFHxaPY=ll|T9!G$>7{Pd?Q`I(-Z>86!|$l}d#>Lia*7Hx;#rX+e8-eEt9OChlj1YG362$oFq5Z-dUWVk!J0j zlgRdz&CJcaJMi6~iQal6F?#M!t@ucQHs}5Hqxbz7olYM8PV~kI8S4}|k|fY>Ib^bf zE?sbm|9%~f%83}IWC``Jp{F|t71vHMB;K<|BnUQc~_jpM9opUf?y^BPFO2SSm%A z>C*M&1_qS0)UArooiekZ8~8o5*9||16v2JW(lFU4dg_je!GJ3RXhJstC(jUsq*fuJ&3>hU19Ub8Z* zLE`LCf1Urr@^I2(2JH@V7?@L-P*yR`_%%)2^~MJNMlzXP4OF^ z%UD`cY`6g!ozQOcK>h1goIy(!4N9~R$-M9U#Lzdc)`ss^(bcYdku^3Exw)5iX6hz{ zt?eG$$Gt*NeO(8~b`QO7VJq(E2RIjE2y}zV5@EhiBiT zrz4UAvT4UAu&~H6^syPAJC}W%7{rI>1RDrj{z>E%RJk_p_n(>1Ak33$+9-RSOrD4! 
z`so(c#d-mgoExDpDZRu>cij4e~F}C1|CySE`%#5oz5Y*&W_d zXb!4t?%C}Vju~}bKHuj?>^ck0CI2%T@vrVzigOSS$9aFk1O>+vn4nCg()%|L3D{K< zC94RO>pmIFvlJWjW!uihwWoM>J~cE8wTe0bK?w(vh@y$KFm zQqpTQ!r=n;2x+C5ai227H4mf+(>lxc$G8*O^>yVy8)%!M5KxTf^sH%$WI*81Uw4IZ zy^9;tLXr~QFCJO!KqslW4-$vDXr?5{?7YW;BG{j4ov(;p)=p93zF>@-pX%u7xK9sm zr(|5Fmj5^B-ZEo7^U_<>mLMg6)*klx|EJ~+atsb^AeNR z1$*T9IC@R!6Znt*1cL8ocP*6tksGpr(V!PM!Sp)=#|)*5^LoiVsqeagpkN z-FpvEJF;5v06l7lBbfQ;2yHi5}n@FC%!M zb0yC9BffjmW=Ji?%-bib=CWV!Vw9TS!s;|A!_&&Nh--D#2wCSw*<|m7Sk8MBeYXJ> zt$G(-_R$fqv>wD)j_W9;Y@ZCLwx&7dX5Qg!|mie|<==g%1# znw5{geECvH@4tLd=+tBOajBy#>(|krU}(Yx&?h1E`s5TV;*Wb}lP3t|!Zwl#FHX;+nRu7tJu#J?VX*Pk2-8<65C0ZYI9W@MPs3DuHs-MGe^4 zl3p0LokE0R+h(UuV+eoM+b8NuY9SXXPG$)gu>1=sjXH?lLI?`98C@rO zyUXq__|M|kk;Sz-3p-1)R*u<9!bWFph74Qrme0q0%|^Isj!y`;MO=DFAX4x{ou97P zX?YHyBpQm1@+TW(r#U4oD%{~exUPtHVd2T-Yxu2*Arj4Of`EC1<)$Q@-oM3Nl z%`Q{Q&9s>qkdX9Q_xC`E3TVC*30wQY+V2+XR2vGv>Ch8!IM4+l7V0Dy?Mn6(l?OTs z5!C~B^x5us1L^H3TH?(udC@_Mu`0Sn(htex??Gs{6<`a?7`ocf!~lsqML|TeZ$sDb z4(p4)JTyUEYFl4hIj2J>88e$KQr+k^Ci62gVGBiPE8s-)JndKb%a zl$-5xv8Rm7*XOf0s-g_kmwS_f)|cspJLs@%c31?+wfy|*j9Y@ z$jZ8xP(KCczbO%0{B3D^Ajr2LJVoD#`4FPuz$D!~N)fm`q$1Q0b@_vXUfPwN^bwXI z1qg7e04PZfJq>aR?0&%%q9NbO&yC(ivlqcdX%Ms3!u@!NS>69<%qB!4G3%|Ao}T`c zOkTJ90Rifu8`MQ<+armAn_|t)>((cv25w&^Bn~a ztwD-z{hF2(TE6ac!~tLT*>vqfPafcZ-Nma}xrV4&x;oC|$DSKy-&e)@D9U7ucfVP! ztrL~ul-JA4qu)n6h<$3)2xS6~o<6C`rdyP=HAblabnjxe1f2$OEkqaw2iDc9Jd4iF z_GD%K@+WcWuPd^<>8U`G7eNpC^4;jj=m@Ln%R`@`(4a3(UFDJa7czY1?t8}`39N*n8JM`rCF0JcDmZMnNqsl#e?P11L&@dQYO+7SZ6K0 zEeNGjE5M}!J^ihgA~_faRW^BY#o%V{=G4wA^t5B6Y+@|Uw1l+&DPdH6mQ&|Z5QMdL zMKR97GCMC1b$%}^FQHd*^@tKr<&CT^k(qVXyl{rH>BAdbtdA=m5A_;dbe1FZx6S!a zB9zQfeJ9*ZA$sSN$*s|J6tv_3W@k@a58(f)E`#PgXX;F9L%0m0{0kT`IX4(NWr9g5wujotqavUVOdTf(j}DmOxKI5 z1&A^NS#p-n0anux&sN9tol3T$lZeEJDP~qo-y(>SYvYR?G~t(HkL-5uk7liHS%D{u zW6n`q0i=^__8+TO!THC)s@*soe+03)K_X{M7VOrlQMTkiV=I1)om_!V+`4Ph#Y0B1 z%K&yO|FLPsQ43TH&b$?;sk#e&|Gsmup63b8)$Rt`+b?QA;(ba7v}+!s-J}tbXvM`Y z)F0r~S(ZzeBsn|zhBjGs%j)H&Pp)Xx)|u%P9oYg^xRWBE0MgzHrNbVoKhnLH*7#nw zLCD?AZED-ovADLPL#NVb>yZOJv5#{&JbOJwXsZs+Vn`IMTPJZvRQRCFyill1{n>6!*X! 
zbS|))#)DI$(9>i*d&R4F@nC16)5?J0#!_{P8O??XN@t@XWUsM@RG10%3#X`9{TXms zk*~*yjvyXr0HP>LLDfnm8eAUI8R33l99t|Ql$bW=3%ZqNPOCBq)IR@iF7#qd1Yy4d zkyxZ%Y5pIpA_C3>s7iIipUOSV0h%8a%rY}L1; zi(=?B|4<0z{0D5KYz5I95D%H|g_KYtRGZi7r}IaqQvzjFR_Fi7lcg@M&flOF4^0Ky z#H?ch2PG7&Oq96jYWnXLDn#bYmlG;;?G0GWq(@QPUPN={J$C9GE$LmGXpCK6Z;+5q z7mcb(oDAAn*;tG*b6&33jud!!Z-RCm+j9s2v`Y&_!Zjw;=+xWkhi92!BtJc@+r}ej zM7XxjUslL@UpJ#iAGG2Gu6ACd@5M@&00y{f4NSibN6M8cfe}do>6?@Lhe6fU1aTD5Qp-86AK{q|fcHQTtB~@z~i^COVW@fGx(YwqM zDI=sG0?VV(NgS8ZId^ZNr)yH}3+XP>2pjc1uUI;LTj%*VG73JZ4-XD9VilD>FHx;u z3k?=tKQZvny8w(^6qr6=eYpwKwN1bOw>!W0+}ZP+kjXO{ezStiKTbSsxOTO=@TtuC zkX_^X*OaTsc=3YFs-BDiO_?iN{6e2eFHDz~{Jl05tI?Pn9_@Y`Ennv2eAe5BBj-yO z1y@WR7Gitt^3H<~rtaLj=zRwF>JgqLqxcN2TaoR0k{nIC^xa0%2KLfGnhJx1n)WMx3r4*;7Sdsfe$dnHlT+Z>C}%2KL;bzl#Oj(`)kbP z$r)mcM9xux*g^Bz<<%o&w6elV4%tBMT)@VkWM7{#jy72we%3nI+jNYUSFGf2pJzek zs3Ld$pITa4Tts7wfZpa+v?KV6*Ax9Fd~)!#4d$G#x0y^R6M+y8{_hJr!w4~1CgR*K z_H>PVaScc`3^uiKge!eiFH^;;B0POGym4f1OC}u>e?gGg)7%!t+P{aq*F;IhX-01P_pw~&_rxgX7s)SZxZaUVb7#3O$lFQ?D>KZgXV2gQF5%33DVw~# zaroo7SDxxgk>}~E`&wXkD)V`9?nqGnnxtCzd&|b@vPlDR?yxYkkY6T-AAbzC{r(-3 zOtvyz*~E29%3b(yBQM-bxEaTP=Y?CALl2H&HoAA#XnnU@A#-cMjaiRUH*g#lB!9@v%0<&#Re`l)zcn4=QB9V@lKWXbS4gO zGse+3Zq$qmtLlGGZ`TE69C6t{@C5EvE}q3z|1Az*4X;upeWZ(~E1(jo%n$URg>Z*e zeZ$&9;CU=0gG3wiR92%8y(>`mP;I@faowcB(w4ncB^5(lk%-R)x}yJJKRy@FA8X-$o2uGF465NtR$QjDAL$3RR`6M1pXwNa`i^m7IwbH-+kiDG*a2OA*E%Y2MT zlg@N7^mDf1X52if1x;NiljGsW<3C8Cfy{L`J&Q7Wg~XXZf$Dfp7JM~IP8VpVFwva3 z%`Xa)dpu&*3bBm{d0YqE>x@%EURIPge{qe0YgRp-L|0np`l) z6%|aN;&z{Fc7ou2N1rl1APaMtfA|_}kyN?Yt*-`aArt?eBcCoP7Ct1yl*7(;cEkHs zR8)HJ4|J8Uf!%%`Y0Ku zckgI#-z8Su_asd6``2Oh#o`lVv@iEqE#5X`LAkbAoX6DzDTx;VPw%M-r)QmR5J*;= z^qw`=l1B?M+Gb#k$r-Zpp@;fX^o%kt01=r!QjE1|P-ZmQm3J;=Fc+p*|Sn)ZnrhJQvkM`|cFA z7}>B8cug*#H|Yv&A|GP7NP;^o{X88l48H~F`~5-va0n^kF;9bdqnlXK~b2OPj z*zv=lpf{2+MWS$FzsbZWgw)ltxHdpfG6}hJ2(}-4OwQVM>$4`E zIO!XQM!rd)K?b=83E5I7Gn@O7ZUnKK*Ab2$YzQ{_29zwEmM!DTCDMA&=gIU{_A6*UqG?(i-6%E2}N@ z0YN?_F03SgV#cedSpB(xpnUq%eMe6B_~Op8GP1?p%S~WgzX7suvoxE5108xMs6(vk z+RDfCr;)uxl)5OJ+I9}(n!7o2hw~vT@Yf30#d({Yf{>1^4mup93Jql zWN5bd3@xvlxFaUh>|JaXEl$C+nB$7Xcoo{qN?oA3fz1|yW_P$dKcG%9j;i($A@$BE6;UcS_@$(yAyjB*WTx~*9~jY z?At_$W1T_vH158b(p`j5QI-`pSbd}BPxQX9RQ{-pvT6h#*?sG?57r_9B9f)gVW&v3 z{mxswB>!amuYx2acUWY32*dNO;#Hthje*$ms(KeIs!bS7zt9_u@xnDP148O+^26k{LQ6e>aX%MxZZL;IrEkt zm9fYgpTI@N&5P%npW|7nvCqk%n>YN`gHDan_BWupNi75Y_4M1fZ$)AwK#mD1BMS=) zUUjxIPZekk_3;4^WI)5>S|h2n5YIwY+9~2=4iQYJ+z5I|ehVT%Uy3Ob^SLmvo!}v7 z;64Tsz(L2P1U)E%tOOmMoXn#J3$TUA@=vd0Ey@cD@F(b;+IAT^mPspq0OMuUTneXh zeoht>@c{)!v(2C5+JG%uam`+DWq3>k2gno;^&|}n9lkgo2)WO&0Jx>mw76z~QG#@$ zN~*jt5f8LhPZEUup_w=SFEEur2gsi^va@qHLk3cpP4+ zrV7G;p)CbU>{fg=9d%Jf5t6F~(0EREJ~i_Pg!Ibb3Ecb}x;V+fLPm|haL9hOKm~rC z!0=shNRiiBZ4W5Ji=Q z-MKnjoW)5&s^{D$kBVTCz@r&7_Vozh0ZuOdeFTv20+gA$j;dh454L%|FBg0KG}@Ja zRRLDlJ+S~8gP2GIcOMd~$Bg4N4@vdTU>T-7eo!H5wFna?nu1D@12(n$48IE@u(n#i zv3f9lG!~o@V~SHNAzNHq?DpEIL)`wbMeWzkID9=xx`fTrclRLi3=f{H9AvY&bazlg zY`F*v5GY1kwI_8n8=|g71CHcPedY{$$qU+tqzz$b;(nj4!nHF*WA(G8&P;XYrC;Z5 z`RfEW>wQfy@tjDpq+QUpC0CM*B0k_iO*UrvW&+>cr8P{2QZ!?Z7{qhJW2jM}>gOeo z-h?e1AH}_TNlr`}w1h=|nbU+R3S)qE*CxeBU5LG$AC?_tPcn-tJuDU?tey}xfL@&J zS3|XJLNrkN&>L1M)u5fXynZ6YpNfasDoV1;*L~~tewmTrA)_<%7NZe6_yKdy#;aPW ztm!jGL1g(cMk8jh#?M;{_2CaVw~Xh;+G3C&PVE&S=z%n{-TEiNwNFTO^vlQOB1_cj zb&R0S%-ndr;0XZ{rMN*&QY6M_LcNQN&kacF167u0u=3geKPD zYaMWMlHLpmef_o0K;=7wTd?Z^q)rd_QbxjC#9FBXaE+5eUNmkF%*bNMct}<*pIj-4 z5Y#AZrm0}8PTVHdk^H4=4)PXZYCa=dOHJU*LOt3Gk@C!5RY2qE3) z_XqE09v67CuLj6VxJGz)5z0{xt*NkpvCgO~>wp z9Kz1r!YBaPv>Cy&Ww$`FG-12|`AlS7)US=V;c2@aF-`F#)ogsF{Z6svU<^+IJcs%*+HIytk#RR&;9MmY+>hB?8` 
zW=L$Spjw~<+qSrt4nX|NM?BW$6qGAyyRYA{0`1C-KL}>&0Mt`9*a=6Vt+#KnJ@QHQ zk+i63vMnU4^gxYI=J5|72T0J$_>GzhG%~dnI4gN_rPZjp0Me-i5CH1=Qg`*Fl?%wP z5&rYJ1AD}pzLKG`c({LMBn0GNb>T*>So`^vq?5U@0-xZZ@69jZ#6TGecGHv(R$DH_ zL*0V5yO=OoWBUFYAsPy*zHBhZQz(#2F;Y5*uXZ!#3~6C4JzI9Kx5$oeko2crrImgnAJG0Ws3Xn>DQn==LU`-twQAY@vyNOA1foIf{E|>g$B*%Q{ zc3O}G_1ptN)0Wk3!UoPYdF=itBH>RfE`Q0IXXmvOuB+E1i7w~;ZFji)E-G@#Bo6~z z^&P}DvlbN=v%B4Q7I(R6i;Y ziUYT8Ae@gVw0JVSPa#H9eFDDX9P2+@Kc-C^Tx~3Z20)p@AKu&GK z?hT6I<`X~x`a6LD#7IGX_0N(Sx3Q#549jAla!zC~ToL5Nt{aeg^$igOND^7TiBSO< zl@GNvX5f7`xx$3)i5heTL+jUmCi_~p;xz)0o5K#N|Mm9?Yl{gOgZiG;;3|!iGoRJG z%~2>qxin^Z8Iyg={mej&%(M!2{WxL|kg{$2^4cBes-yy^)v#G`y+GYntXxxKm)FiR z@oLdR_kT)=T2U-gqe42h&{v?b8$usN?OC}(>OJLtY3xqp+%R@sp9F;hK`)OdEDfa# zH|htYxI@AH%41`Fg^3Y?m`jxB3b9+X`(Bn5Rt{&Ru4 zlz$6V6Q=lJO$vBP@t}58m(Qrr|H_N0(;S#~a0@)o8Yov|P!OTW&WTxG$5;ZkvQM19oidrWz^^58`_?0O)n7mAIX zG-QIuKl4-#$y0*IVR$=t-n{i;?=N%cm_3sO%i^cfFq677A;4lmu1)?t2L4hxl)OmiU+E2r2paQK*tkcs8Vv5;qr<BQqRB&gDW#k;^3hC@EUIZ8w6LIOceQh%ZR6M>Az5# zBg>IUfmvJHCqyJvu8*K+VgbJf0zBXl zbpdg@+I-!`ofhr*Zi61L-T;+p&49b}aERck z8I7pb^CXbZW7slRj%*gH0Al|1YFu+B7GXbXA|;3H3}CV$StIyMvpx>W3>3;S&H%HV zhS?CUQLTyca98Z#g-yIS*k@n*b`87kMe02{V@X-b_e=0?Py%)ZwI3)tWqL zgXKsi#t)`o*X{o$qDRG<2|XA0A(1*;bG9`|Zzc~Lfl<<^8HlPQUAQ(nmWi=EHFJcd zq2q2q(x|C%BYwk#VG8QMp%O%{qp2P39UaP0i>XBMIRiSBDN5D&ffT8SzMERuza9P~ zXIVUErruKb4==zrYqLtn|{qQ%u z+74WtV+_O^teH{OZZS*YdpRNl3b_heEP1~W$9q0iBz95M z4sb@sP3UR8_EuHDJD3YNbY*lJBEJQ`Oa5j0%3r2mc`}#z&$|Ft5=PB3QC&!LY7-$9 zRFfIb;Fr$6EEMLj4j~-=H1Po!LMu@hhewvlCJU&rRha!^q7~6X!qSysBon zTHicLFOhtb-WF|+|HhvCi1GKkr;nZ8EV5NpNy#8EsB3r}BOP2bSht)w6KLM*A#!Su zM@XIOmzA#G`pg(>s~gF;&WLk!*1Z^yO&hy|k;}LcW24OzR<&lxBZMVk{`Zglv7I~j z{`jxtpHE3EDk`#@{66jbIkBRAtSP7V%zJ#*r}k6=m3WEtsWKPQo>qHe~rKQD0voS1-8}irI#@DA^&-tF-~9TNp@%(_uOZLR9b%gQj32}_zcPe3om|~Vl`&YoFLR$if4=^|0e{a< z?jU3}o%nnC0>l^;<9*vH!JG`Q(j4w$?B7kIZdcKfg(ixrjU>ud3?r z9Ai_GnsuDipkE+6glw9*+p4+e2MkRzK^PrqIRcMXHuzw6Z?>)BaZ?P1~DG@eA^ z#v0Dn4$rbMX(LDdWxWROF+Fp_&o7L0nYB)mRh*oh@?MCS5KsEXVC;|Cn8wt;7%#Yf z{kn`H`0$r!?YrEm$^ZJ`GGfrP2cj)F9q&^(u!g$rWkv^H{qH1yKUhc%{`+WG_amzv zWMX0}u`^w0K#Y0bAjXD=jg3tlYaa5;9!be=HQ=tF-<-b<6O6v?>$BpW^0jhDEFxil zU}|(`p2lg010!_jV!e56@3{lf5s)vBnhIbsW69bV$8}R0F2Y~jr|kUoXxMlWmQykPMV}J)di$D*aCw{r!-)R_W0v{2%29PyF~gmO-bMd(ry6(b+OtP-G;4uu?<`{ zriIm0Q&Xl082$33tnI5b)_>9f3CkM9!zc?)jIu7t$fbjTc)ZSl4t}0RGl`~1{*6Fn z5yT{iz$6T+Dzz_;Ax`hOCtl0V^ej5t?AJ|_M&oJXOXrEv0d^bHYsw$vk>e!3fu~|( zbW(!ppv(9CLW<5$L#7~x90J_(pl+cGAXc$wTVReb_$M0vw%@aR^c!sMqEe}tKI=!q zklvW^qheFHfU>hx@Ox=?n24ATgT3mxD@PsiPnsLDU@C{oc+Tp#qTcl^@!IbQS~i&2 zn96T_AIlCJz8Jzkw3hd)QH_$6-5$ikH@hv<{2>CuLwS2J%}q03Y8T(V7_UNHVDDjL zx8hXN6>Chd7eRv4GOTFT_9> zI0VIRE@WNJprDO8a17%t0x=ETJO6qyKDWNUUPkAoj=%m{_)at@aMU3r!7|5L`Iz85 z$b)||iJ|=HfVe^HM?xCP?E8Ts9c-}MI9GrjpFV#pXC#!!e`FHw|1a2nTB<=y^qJ%l z_?i3%Uh@i^vf)8Kd|^22;NYP0o1#)JTTv87E(GokJNo+iju!O>t{5O*Gh`b5&6Y8b zV1GnVy1@Swv+p}?ujMNTHx#xym^x9|#wD#2u7isTODp%7?=rm>wlG=Fb{AS}a z$w*D9-(QjC0ejt^5BH<}DzINHCq*YVsl40J6Z^Cis&G5w4^io%9yVY2OqJG+)dk+% zI)K$lV>j7c$1;Sz&V0auF)DP{pT{6@NU*2Rr0TD{b)EiQ8!psbCc*+g8kHa`X#`*8jD3N~^0r19lO!q@3N=njzHk()iGe!h&rt1-+A3dm-BF zJxS0HnQxY4C`|lB%SNsSn!7A^n-K}=H@_tH4ir(mc z7p7oYhRt-Sn7#0&+QJH*!!B*KzI?3TU*SlcZ4sH=BCsEE~6Du(c3S7hQs@szv-IE*b};kWx+otclE@yg9)-bXI=cTr z=;Q;gY5;6b+>jo64<0{$d?DAwKQP8d1<`EDS8HE5w)(4YbirRlN?4Y4ZBoJ-%8X%B zg%`H58^6-Lv?^qsXZfXiQ{|1XAGV)}IKz8r&5b*!c3*i}M4M57!}8L>+)tM`9r`qV zm!0D{CV8mwXu@Rbw&4=>{x0J^@-K0WLPIed-M$U_ju$$9Z*#b3X?enN4<$NW;iLxj z?Qo%7o9mvDkuO9&60d$k;+F9vza8KHu3Bhx*uVY6jTQ0VIDFq^M-%))X9o`Vara48 z23-8Ix+Hz*Tc_UzGsPy%C-x*R zAE+lp?+ImZExI+mY(T=uJ=_12cgT;n{UMGe$%-P0FI{Dwd&j2T<19j1ztw>9=z@2k 
z**{$o0}FoLb4;*MNM5sEWcL@IoN3vrT}@sCtKJ1%K`oo8k9qz5cesij3jLm4+0?M( zOJwGrRAp`l)91`GO+sbTt}SQ%#E-(SuJECm zb3b>qsP->AGYHoc%+4fP7MUH;{>@_i&D)D_ig`O(6rDoHq4!rtR=gvjkzu2 zE_=`{@guSQOGPTV?aHf5%LMOTUK*}m%P;v~-hE*7UCdf1E%|$+mZNLu&CU94iEZMt z+5V@Vx+$ncYV+ovxr$OGw#}9P{_=jCxTjcd@WkSmfkasw9i8Ox%^yt}$91|68Z%xS zYETVUjwfBZe11*fOC&qj*^x<6`ExxwT^r_{tD8*BRt}4#i4HNcY+JltvbFWbvgX&z zPe*JAm)AY}O9KzOa!noVW6|`!F}4;i%reJS@71}@GKrOVg!rzb#dAFJsvTyJLs0*+ zi`?|87w0saH>;b=EQ{5yPj@7-q$Xl&>fc1LEeoehN`C)wR7z7s?~ZluOEuR6htkfF zl5Lxr#3iD2dNFasKp0-#mAA@Y1-~MOK9UJ#4~h!L=C|06OuyrLyxW7&P2}-RCt`0e za>X+Tq5K6EOmx%cxAF#XGI8vY*H}ceKYdj>By{r~rYBAF&S@>jJ-*NVyEglCE?HVB z$%~$R|J?n9KeMYedh4NZMVD&>N_p{{1TNlFKoHpRQp0GtIZ2cX zay$#vKGZ^9pB_ZnfGFUr0)TqEG$Me)g;*Tf`OMWhgyb2AYs-f^K319-I3`eHy`1eW_Q{KH(WdPxwIt z8n@=YzmFb%7hNejP|wuM_~-sGC=tAwr+oWP_nW(4;x{3HYXq#X}i5syl`>%hPn3xcO|D~XxG*Z;X;9`GQ zy!l|M_(q#}z`jEwALSm+8Ka|imH)QN_A==~$CYiR`8FNInnjslHKORprv%geVORH7 zP`wCJOhIqMkB5Jqr2g4-;<@ci<*nLXl}$@r!Iwh!clSu?-_|D14z?0=_$L{nJ1$%8 zg-ZL5a$#Z{-&#@A&8E`|IXiMcjigit6!fnzh)^TX@-!S%4xMc>vQp2@!vB`zwR}lu z(zx%S=DwEqnx|W&-6nvO$vhjnrD6}S{3s&qg(R4>P#n)+4?|@@N*1-6y`M}z07VA~ zyP%pEp%2F;maZoJohxCp+*}3f~ z2TOCG$1Z4$7;;@PFefAr9adE>Jr`p$oB{=M@_DFvSK;9G+t}Sn7`^11ROianh=&kG zJeq3z!y0U3ITKhDjyiI$?#T6-3aL!?!ag0_BfpjZw{g~S)AivG_fB99`OSXgV;t8` zknv&`O%Z>7^Gk4MZFPI4S|7eLpwECfMKpXdigDVP5f5)=198MCnmm=rxZ%#+wLv1_OSp>e)deklC(bQcTE<^ zG{|&mb@^o{?rk0d_r^zpV65v%sC(R(OFYQ9w8EN;4!(rQ|9nVxQS~Q=2Lomi( z75R4if?X$Z%#kBK>h2T?(`6?XzkjVVK_=wjVO8ys720t!dy#8MK-ve2f4j+2mWZ!7`l`-+Cf71Ag zQ%T<{?DI&nagzP({j%0fy?dlt6)an#IJD&dN<0R=$KPM>UfMtHZfU6eo^R(}rdxzr zf*m@3j@^>i9Q|^K!mD#=&>>=k~}wTJ|C9EdB2GScXxB*VU+DTB(NB%kTej76 zr@yPdUg=TAR4M)NU0zj1n_vOb&oCTt5Y9@UB^H#L-p&}9KlCC(A@r+=y>M18Z`SAV zYY)wtGDKS@jnS=*?AdwI_|jNSJ~rNFR@llT(n*UkrZ;8bu#Z zIQA((j8u+H4innq8q-ugwJ7v`H7R*ImVNx>jQ}Uk z?ZlO09mUHjp`|kex3q0KCWqc>De3U?jSCH&R|gA(E7BlH32esp=$Yx4y#Nwz7j$j@D6gx0845 zzbRndKgLVA&zSU^x)x?V>vg`RToIp}WCD8vYbnwDt`OGu1>dKaa^Y=+Wt}H!y5VWp zmr7JoZAso|Hn#NX2r0hVvS{cuzxATz`>i`a>B))&Y*iF@+Rtty!K`&#TV;B!ktG48 z8nXHA_$kwSSF;{2?N&rB9iDRN3!oYhLR=3#0a(yZP*ADWmdatt;XyMO^L`3DH})un z{n;j$cNd;81Rg(O9o6bW7$aCRcx|oEcX(GmVJoJW&o})Fs_Ujd4@`0%toFBZvC`<@ zbxGx_SGLiX2Rm!?syHfrt*g%Wo402NI|L_jF?qE(yXGI;FcL92PP%6i@pbI0P*!dp z5nmbrA#5I;CYNQGag-~JV5j=|(jrqUS8K-@p_h?U#{Hp0!|v9O)JxFxyV}m9D2jA4 znu7;IZPzECK_j$jt0==-<&7%g(6o;#pX0)h2?9Cu_4N|6x1D_^sB3ypmf-+b3lEbR zlO2PZNajh>sF~m)^SM~6N9f2C{SjvYCF_pL5JFh+2CD3YBgHgS>PyP`ucC4O6Q|7o z;{LiPi?e={Br5fp#Iw(AT)YeZN1c*^p$5 zxYI;YCKh~buiX9B+IpacG*i#3bR~_ct=K<7SGBxH`;^40#HSl&d`o`I-Wa{utM1*E zYS0chO#dp>GCZ_3*rNo^d(Ml6`{RQ}t%tW&%}O7w=Bf~f|FmQ(7ZZKjtL^A5A^JR* z`@q?L5v;u@y0z6*b919)%w;TH@h|pa$&WeucLg4DTkA?&oprLG@H(y}wEfaSp*eScw~2E5 z>jSnzd!{WC64FPXH&;4Bw%_w54u|p+?=16&4&VNEr6=GI*4pYC>HyU;)Rm&Kt-{M0 z!#LWKa=?`_qXN2v-zJwIBpCe-O9tU7Pki$yaj?mqJG(q5F`}wSS^|>t3`9imB~ncCNiJ zX|_m2zB!0#iOtpTOb_dbL|K$5gl@Vso6*t<9GjmsSOM-4tvpLXVfA$IRZBe!?}&gUWT?W)8E(<^nd#(Ats-A;$Jk;O&K0XV^&LCvpw+Pwy zWGrLNmL;L7?7L`Wn@KT~kX_bZwjyhY$}-ioDMa?fG$n-?TPEx8cz=H1Kl#JMd7k^+ z_jO(Ob)Pf&bqF_yT#B!k2DNEg>eiY6;z30(L~`BFkx{$~bWxB0&e9JaU-^*gEFg`x z4m&kW|7+-bt+I9Yx3*C8NnZ!5EG?3h22Snoe@@Z?2u_5zO}FnoyeSfVs!)sg@bJdYN_aWR#;A|Qz zNe@MvgQ#)DXKEVjVWeNxk(tuMA>q)2f};ql{0)@XK=RL;rH^`EVZn zU0YK#4~Y8$JX~&o4_`_Fj>f$svE}5E*kXVVGK!N@pvul)Druj8$YLkulwrb_kXp{-@Iy!yBQ3<9-sT;|qE36u(aO;;UC_)*AX!-ArfgJYBdQs|_noU2V0 zqlK+}Ig2WJz;SH-%P8Y0UgQ(YGQ7K(xpCRnGrQQ%pye11LoMHyqFCQA0-r}HO=NAR zmzQ>mqAVqGrsP3x7ox(P!BvG(jG>p)%Rc03f#_dhEcLEo{kx{r z!?xiz5z1yUN~_<;jet+hcgH3UA$Zn*-k2|9d0A zh((rb+2W>o>5YByvBykZk}*gYJ6Y{Daa&E7)pS#2F*cDTPlzw(f+kD!3dq)#+rgM{ 
z`-I@1rr3doC%?&uU!sY-ZK~WQPGQosDWe-b*G~ip&j*NqH|gY^Gs`9Zo^Y~ zaEZW6S(dF6XErpSHR87+;Sy&i`S>!W5~IM&x&1zkqkXJ<}-C77lik^A0*gbe>vPntO&?HxzWP3=`5(`q)xnC%CWhVbE#KAG{9rH;P(o4nJqf!(z{tHOT~nX)_2<>eb6 zYJHTtZaxHSl1*Jb;Gy%~W#g&{NFj6R^ zRrsF#&Uq%J7dk9*#?v6r#)4iEScZSiePHL^4vlYo?3grS>;t_J{$xhhj*n)>9WR4Acs{ zvzFR4Y=>iAVAAXF>w&Z!#A$#W5=eu`!!>BftN3fVQye%EO^76h1YWwIL0;IaFiPPlJUlIg0tBgkUD&rH)>yf#u*JszQnoa_KvNK@Q+%1Nr|S03urwgu$_ zDPOSEog?mk1#>NsJ)C0xSlP}d$+g_w@k$G0EF){oHjxxb4)_mseKaSaxCe$@Cz`6* z5U;UCt4s8V{UF3w{2}!2mf%!*UeUZ>l|W8<*-VGjI0Z_4cwC6FZU;J8_R!Oazdr1N+#NCi~P+N}Waf=wAV zV$4$wVvJtA(xRCFa@$$W+rZfl>D+T|aMg zb5T)E%AvvB3vJt}t&E}0aLr^tBPe~~b!NRL1+z>-be9IaJEeSP>!f9AmJdAPj(c>= zu=5CNNn(6#&eFEXh;xob>xM|{a4?S9c8`M=ROwcA;^GA$R0f5Xoi?G#Q5Hz%6=a3{nLjA{rW�Hb3N@C8UbRsXio1UqFP zmAmv7_nda6vc4)s*TEYV;?=fxF2}-+B~m}PNJLXBR4>Rhl=KaiOSZ(TLOQ8N|ADZ0 zn6(ijuRgqUit1TOjDerEzJSXAf!8CgkQ7m$y?jP)SFe5G<$d{tY1S%);$6}%g&qik zMzBYKx!96CVb{zqzXS*tN?Rpu<4xdM@(niNJ2Y11PBO>w(N0w&>@qH~BA@LlV|dp) zaV9+9I6C}5J2x~$EP^}Yw;@ zvuDR5$NxTo=~3x^oi3o7^xjVA>t?_rMh0CnYNvc!6fVNHj9-!u;?y7%-rFa|Yn#7^&F@nz*FL?md?CokQz`>lE#}qD)LuOh-r?^i_tI#zyZ7JG;FL%T36EF zS`|yz9BDb&p$5%>vP9CsnOvkDIT7S6GJk#0JFY7_DUJx&tQSGwcQ=0ybBdrhTfN|3cAJm)RIWH?^mB0Ky!p}5oEW-N|ENj- z)Q89L|FwTp`T?r`Rqy(UQ+C8p7QF>vM!(hK@?ZK(`vHH}2>ups>@ivzK#h16rdjq< zt3=xi1=ZKJgs~hEa_Tjs@2l$(p(yz4Ows}_3w{kS<9Y{Cl5OP!sn??|-ax8ZA_fzJ zzFP` zRI^3uisR8q(2t|=Q49fL^P7%xk&Oc&KaT27*&qqv^R;&Nj?Tg8v_3A8D5+uz+4x*g z#WHHQS+!!>@GX&t4(;Fw3c><1A(VTfxjTx#B@g=>G)dn0dly{s2QH|?bub$D1aFtK z`1TB?gH%Umut<>_{iu(BMc|v=z=_u?TX4uDL^!+Cu#zh~j{%4nB!Bqum58o!GI|e9 zA-yJflT$ZMFt6@yqS}aBH#`v*S9w1-&Yf#|!ExB(C{z0?-$~#UrxB?QmrhWe{yb73 zZPyh?-v7V{q?44u580#lrwMg2t8(S(&%nPf^y}i~s)=Zv4Spy`PQo6nexT;G;d#Sc z_Yw_M?>}1@CsVBSnfN(YFSnGT=bUqM=%5R^cpc~v{3p*hWKdX~6!#$? ziGR$hnrh%F4zbf~4bq3Jw zMmfvPuOiSrF|5Y$QmCp~aG?Bbejfu}1$i)?vUJVigRmpoAvhr5;Um!Axq-c(FM~es z7gXnRee2VwyIhSqc@_wDyLt_0C$=E|@Cq62*19KQa54J}WWgWA<^g|h-qvJR zk<5j#j#`n{!T95!D!Ta58UKk+;+a|Lf~~`rjiJeuAW9F3XC?s{4XRszGyH`bb0*|X zzN;S}Vd>6hzG9MsozO;>xThaEY+|lLpF_l9y=Zpx+#O=?m;+DB;Brh7t35tY7J^X#SB?rr5mT%3EM%^@sY{fNb7zSG$shc8_$A{U5E^YxBMG&ya(=B$jiakkG)NP81D(9TN}qcVOUBL+JrPO)J%BBWB6tww6ELWU6D_O|*FuZTh(@-$`Y@x>HU^2LCg zo?k?Q+EPC;mNItdGc%n1)A*nlEQ{(?#5EPR9{ay~f!2P1@ z3pK{Yp#bq$=H)^bgE7Y*ms%-cxIjtF=cM>jU=-c^@}YY5(rq{a*e=8ea&QcL-@0#G z|0qT6#llZAWy;MCSy#P>8o*cOQg;&A=|1=K9zVQ(q)OTe2@1Xgi}eBU{p|k`seTQ>1@nsbQBHl6+`?EQ-4CNhtwKzOLu%1A+@L2+nKC zDGn?m8)Kpos^~P<&g*>qq^!1651UWRqnH6d>d_*9Qgxsk^7QDy&tmX%XDcN244}=h za&$t*n4wFm3~=8mAXyf?%^EIIp+4pGD){ zAYUj#W#tWJleBJ*2;J6uTMt6wCe2z|`_g2_9-Wl>v88ny+T1Ku;!TmEiZ0HRj*-mJ zC+ek>c+uCDz95~g0l?jwC)P#!z{Aj9=`vbXiMT9C{5K2#z=oetal|>OiM3by$aH=Y zwuM*wtj$R6M}#qR=^0zToMJTRBrEnri?sT&-^10%(1Es0wGOm}NjO$~7*&7uz#@d9 z;lZ0Pb)*jfu`XP$7^r?QVK&ogXvOf{sJvYWy>(xdt{fq^mee~1Oc}u|Oy_tOULo*l z#AvEMrp@unD@TDf&QPA=0Ha%7hh;1uWoM6Xj$GfCjPsSGd19(2((y~V=Rkd~&XYqL z1uRg+42^meQS?k?^8_S11(E%FiSslBk- z2J7jYPoo!N;Wll`ZF=AjvE*Et`U_f?qu{qm^xoZ-^SRH&&_OUG6z!`G!R49&vSvU> z9BV?0qUb05o(!5M5D{Q$&ghZcd1CbZb9_}`28Vpmg*b{!<ZGi8E>V&pZ=O z9Ir@=>ub6Zsx9pF6A>p`M7kdBtznLtgl954mT)OUn27h=LJ9Gcb*FXKjGGX@s-M1; zT_??wT_~N<3b+_)TMmRtV`T|e9;?@4L-myNWNMHt*Ga`|&5Iw}8wStV8HO94zWLec z%uR^~xpLF-{N${D4O98GQZ*tr?3eV*-+u6DLd3Iwz$m74xcha!D*a7z(>tVOLLpCZ z&njkbbuvL!zHX|W7r#@;4K)~;iFTH1YRq2>ntvNl3|{(*q6f>;V_q#SJzIB_I;iMC zPQ064A4Yuim7#a^C45vqjK2_)5E?puI3$X4r)83Ek-`Jk)FJ_?%BZiZy0ipLv(nx_ zt#Yb~!zEXyMlv%qAK}kC0r8PT8L`YD;!r&hX8FBOH^=EclK4CvIt$smw^Q6wPyEbj zm@?V&L?Ki$$E9kP56QmXA8(;Z*?q zJ6D6$Ebrbwmc9b{mHbY*m(ouVsklaM-D26v#8b%jL{>9c^G z7`sL8hzX`n>z6PW7MX3#vo5nHkiPrwLDFPVuw0?Z`67k zR5V2Z*;OB?Sk~7;E1%XDa{vr)0<&~o+a9Io6EceG$`ilf;e_2g--*p!DQ9a`Cd?Un 
zjI1>g1pt`USwx}Q{uvOLf~0#iuCoJK1##5aM{-6AZymS4i_BQL1~E}i4mmY^$Cx3+ ztf6Ar#rWrFFS$NQdPkoXMf(EWk?0d3-bYaHkR?h!=!S^_>+|-mMiw_eeP!OemCezR zKkz6x$ZP02#_lhak|msA=n&#p=Fl*tDpRihMEb@e|0nmM>)aj540Kq8p89@5L$GUb zot^J}8N2pS92-sm?#w&kq+%|0xsK<_jvFNw)3bb%Vj3j8VE~g7Pv1Q~;@Y z1`S}MH6pc=soxIBXR@jne=`*Dpx-vXYTxlIkgrYc%f+%tfKDQs^a1aNMDIDB=!eF` z3VC=dnn>}ej?{my2m7^mf@udNo-i4ItA?X!Wdl{i&;l^Q)`fF|Y30~fw z-V%U~I@HscpBfarDLQ>#=8RM?eD6+xLSJEkTsYb9rzO6m-?F@Gy9^$}xC|5F6hVa? z1d(wK+8D@$0sbp4S>i?~aDt5*#=;JW%)={oUN%~WOnb%5anB1TyFKq+FUJ^NRX!y} z*%qTbTYp>2{Y|uuXxFq0LGS>{k!Ms_EEgY5VEJ{()m7|XlBB;xODwH^bMChWmJL}u zm;0nt*0$l8*cqcc<=AkJAx7e8(0h8P=%P`Goya03; z=FbZ~4x%|I0RcHgZ`Np06N6ssW2Bc{NfX4g&)(uYG*>h%uXyf6Nuj7EC@avf*QIFs zsxBs%b>u$WxP)pRT#DPF92vE>9vdRXb}mY(J#cJpULsy2`aloTe?3Snbsq{QtKHZ_ zF)vzER7tA1A>_VeJWtBcOt@dIQUqHJFI2EXdZ(hV_(|z>{r0U$v{vkZ)`SSG^xhHh z_59>9;T1j5$Ez3%8jf4s9G6!m<#^D=*bC(CnGBTDV^ztL4`(Pogt~y7YWXF&H%m9N zD6fmZuH05m4tEtME~TvlBnSY}95N>bw}mh~#fxt*kvA(m-|;$MpG9kWj>W^@K6(VNsnNdwV#X^^fOl-7VBFe3Q!Joi?AlCIZR=XRYL%u*BnzFa+7c!4b7t`ZI z@-pC}K>Ij^G(&prLP$W;1VbJ$d)acuTA8LRhNpZ5vD|bfS~%+AnKq^*fAYrIcXGu) zKa6w~RJW{1srN3n_m`E+cjxy|XW~W1H0~jk4)2gnl_I9Op!&_IX~RHrG4ouA3}Dzx zmjIs`;*L)cb8!ERs&DvW#G3FZ;iJU3rw{Y;n|CoCo+b&P=$_b4qjwdq1{!oOl53ObeP-^S`qC=6hn+KO2Bq-hu_ z-7^Chp^^4qkH!RC9@GNZIExGZITBFIJtxhbd-t4NgL$!4G^hXtOjRKq-Nz?m(@!{c zxXOJVdwG>Q6UUnI`K+u~ib#a9I`kkaXx`Lj ztDK-c|&hIa>zTb=j|D->RwGy3Z~t`oVi zVKK9(6nM1dr<)+|4k^2{uBIbu*xF$1qzF6upcXLf1qwCTHv}fN10dix}2EiU54%SzURJr6T z1@lQ}*i&tN(!w*;$9>3^#I`5qZgtZ_1S-P7?s&Kq%f0kwf!9=S6&@xxzFzFq^YxdN zb+wDqz~I6Lz{$bYX%~+AsOLz>bLH%I{`KddSarB?M`r9lOPmgphRU0&;Xn3ODi zo8*uCRy`JA^HHMq5|}gMM%O!U5OCE3JS7xaqmFwhvw0bc3aPk#-ijAPSVLI+AgpFb z2i!@$cdkv0ZpLBaLkw^^fVSjKx+v7e?%)qC8zNO4ywa;xCM+}+d{Bvu>W#9+{U$b= zMRT_X=E2H@J{>|5j!&jDPwU^?Df`>aO6OZ!d0f<05aj6)G96CN*zI8dqd@&>3kG3a z#c5fj-}4nZH>8H!Hq0?*OBICssu@iTGoHr1tf1(fW=Q&Dn#jRX#Mmu3c^u0{+vi9M zRWWw94iZVn>A?Jl1ur6(2ptbqxtHQ}kikaRAG3EgCM57qa_Ht@=N@xx9S*h?Vs#A4 z1UG35?en&!*ALV0x=06)H%#e#cwDvCErE8jj-$|(Km4@znG=UFA4B*JGSh*kZ-uR=!95i^;D9e$QDT{ zDJubHJBia)FiDwvmrn&JB^&Yv25L;j8wIcK?|V&n1@QQNmC2n7?psP+P17CSVgsGr zpBt-A1M4@^@v7f0!T`~-0BKx3LL|c(%!{-RCa^~y{=24}E_ml3*_5IRvE)sGH`&^4 zhAcsLA#>8R2$ke>J?wLC)9;E@8G(*x#6j)-|c#6&(QbVBEl~`hxb9RpE5Ig-G}ewHb0vB3St+AnIJeTcdq2@lV6N!;6w4MJgfKa82qN1)w2*AYCx2K&|xLiolO$PITQNa zoZ1AfFOqdJ)Gx7GgO$_84h6-3C3K*SSF}--!CYXNjQskPcx6uvbyvtwyXnkc>M5aMkx2kF9%97e5seb8u}0 zC%Lh~5X{h+_mNw!&R+8KZCiMf^^Ab_n6bJ}jEzbYxH6qJ;+!L2k_j*tNHKXAnt<4N zY8uOru7CIz?~~K!-OelZ|GWVI`8et3V$~Yow49`cVPGXPzkiQQau#=U3FeoY2Ql&D zbMX_DIYq6k0j7UTmfW}Y)13DeV;Wb)$)-c>$qa)I&z@9Ih8zl2QY@V=O|j8+|J_pU zA*~W%{i;abEtg~)JR6X|D>Ks-``W)DDJdxYEkGUXn==&B>C6!lj%VCl*>D@UPM-Oh zPRBorUk891B25v*_zb*usrq$IH-cMT(C{SxCAFF_;nbX2FTU45rz8P~`h3j1m65iyERy5mxDPsY zn30?x%1)1pPm45^m5QPsjd7M*8_pES7=2}?`o2Zr)!%zm`>l&$oWCNcOsj>aieA{! zrteICGH5i8bTLb|>y8zVoE$^{aLUXEE}-9zD;2pyZ{^6(Ht9b^jL3fUYp_CYStg|k zUI3+daa`QnF+H+g*N`;P%E{h-XC6Q{HmfrTj|pZ3{}fjjh#L~&F~_-=8YJElV$p^V zhlMlQBTh2%KagV;cKX_U9^LqeV=@z)*cjx2GKtWmaaLa4jG77eE_jQtfXE~JwkOUG zz#3wG<*7?px8ffjj?R0QRzk(VoZpyUiFJ(y@V5QTA9F{M&tJdxT!{sx3j{(1{F*;F z6_c~%H#^JdFwc%c-djlH(*@NJf*0Piz%J#sSq%i}(^x34k3NCBfO|4W@*g#f+3LoU zfTdQR2e^dLKt0>bDKn6~iyxjcm1)e*ChyX}=0ty#mD^7e?8*-ciw!%rPwFB6*E9hY zmo66{w}I|)()HE)GAE8k4gT1|dRawmW43J#nG!g$yfl^4gz`Vel`<<>AU>ASdmVDh z19>Uzz|1$$6eK3qo7M;NEG;6ev>-+`;v~%i^lhr1MxF~0bmU))^IiI(tab6?&CyDI zaJTG9&-Pyd_fHsr0bH>_Js(EF`&eMS-{RPoC--1*05Rw^N? 
zn9mQZ7r(%nWZap zo%>UE>6I&C^8G`zTr&)t^KXW6S5S`RLVP0ORg<}&1iJq2FquL~sq;SCZpH}R2+jzB zq{G2+#LQ_vY&W#%p0NL|Y5W4T52|fF_;_6T&RtIzN51pz;1)H`JN<)TQ2ks`8MsQM z>UQO%rU605)y35IV*9>p|1i@sy9hp4@KFGJgi?#$Q6v<_R*D|%yFXOUzj#VL5r``X zjzKQLPJ-$gMG&{Kfw$M6#pC0qBypCwW{6E&ij_*es4g$fqtqod7k3HQnA7CLeFZAd zE1afss=3x1^Z6f=^o3y3=DF`-WS?j5m2u^0-Z#UShh|P~Nj^3-pu#XH-LE_dzaI5H zK(+w74Sx&YFPM(p{TDe{8>tlT#FsFPAz7VPJHzKIv((Wie0Wr2c=AyQOig1=S z<@Md*e{mqkxnyqPKZD%Y!|UnF(iYJ@ zdZFMu*c7K*Wo$EdMb89XJP$d%^_s!;vHDtsUlpj^F6!Sw{;AQ=>HiGv=F=rkT1jbD z4!Z7n39n5@1)x1#gk3#to|P2UZUlg!ztzKljf`bRpU~vT3ad-%zDh)SkX0YfGWj)n zy~s^toHes8P)?=CCBYIB2#393mHk>w*sr9wlVHc%1-l5kZ#Uv9i)=HlXh#w@QIs#F z8#B5&F2!rWiUIyl!&eEFl#LAdVf)7i>YFehS#O6S1pm)BamC-rZI#=QA@txW8GZ{Z zmD=aepI-uNTi`QSKl#5itYPkWM_eatMh}eNxCR>(RM!_-wqM&m+8Gz&2+fx0SZ-*f z#72N`xKR7Nd(L8YN|8_kyRmrgsRp)geykRM@gw!d`a6xnSfjW!K`qS*e+2e+4T+Hy zb2>SSzs@y7e#KSWc2M&76erxf=<1&|;^W&smpo2aUC+~fw{?z^&fa@}DGPiJUadip zzN>e1MX(4pJ%e`|hR}rcqJ$*9f?=)4HKYn6=7kQ-ZQ)IJ^)q4f<+eECwU2#Deo%kt zYL%z#ny1DGz)M$Qy=OYCi!9)ljy=u)w5X}Nb+0!!H{2T4Cue;&k`Q|KRPDcp7d#Agrr;`lba^ZRbPpE`oh=$LmPTdyU0 zAt$ItbW#ATqb95O!FgX;_c(B#OBA%~)ZcJV-er#9Y0=dg{M=x9S^uQLJKv`lO@k!C z242EDN0>E$v=AUlsRmUlHMlOfItLVivY?JT){TDR)cbks>~LfAt=cPAPPN*U(-aq9 zTL1zah1S@7bAO18_}WR02UG-}$5auT)idT#)_115KumlGxYVrcdpHOkje{We555ol z=)UI8n!rRqCiXLke;=hjk%dpe+Yr&ICp5(AQtJCpAI9s0mRn<^3IHKnAVSSe)Jm}S z|IDdLqwvOonk)r5<9vk&&jk+e$Z8c15PAqDXERJOj1fAtQL|tc2QJ^2k~7=|B|GkxmzNjvo3GsfsYoU;03_p{2M^x9x5#P7Fw(dv3H5F6 zVBlq&`_HrfNI}~AGIx%PWGLvX1d`so_mD9POr&_#>&)AD<|7T`qXzX!t)x5a-vd;@ zBN6?atuQux7H^u1?EECxWWK_DUS6=0qoCsxZ|eQ2YY}Hc+MbmuWY85(%--zfqbt&k z&RtLf8bWo@Bp+SKap(Lo8~h`j8>G)+uySWfcGjdD)`e5J4+bZ7?&&J`g~r~6rsLR% zPWY0Xwuif-4AlwQ>E4`PYgp$hX`;PU+Mj+v>0dS$Ed85ei z(8U1OtOC-muiTjO9o*z{EJ#Fkx9lYPaov{LQ611`nQ_Apqd<4DJ?!cH5&Xz29j`%D zBo(f267$?(6%1l_?~V@Uiz#m1GKJ)Z{x;yJFb8%zt@j=-O~S;Onk=}^_fIF#8No2e z_PDcPiCM5YMtdN{fp73Ed(v_}y59Mx0i$mEr0|8fqb%gRv-vwTF)=Y7phq77pL5z{ zZ&&KJG&hHL{vVP=o!G62pl0P=VRv-@-To`81KtdMGRP@~J~qNB!qLqtc(2qPpbRE& zJKGr1>{4@2UTMOB#D^HjAy>;x=NccW%%xKeb3B4cO-CA7!mm_!?=%vRF-DamU7 zPI;PFe+BnmH07}zN((hky2z({qCXUSPupHjRxlta{g>rF zXT*UO?VZRQ6)d;p3ku5Vi?O1U~Hliost=x##D7&nBM-fIdI$WSCY$~H+On_oy&|H9*Clivk5rqlK?U!6keZ3;SKCP8qQZjh3Ms9;7{YJZ z`OApxc8h788*g)l@$+-}8IrY}+mJTmu9j9UqAuwo138|s$~%t)dIM7eM6g^G6=ZrI z^4xi4dfka)M7kvF?L785>;DIV7pPBn&w*%EW-vArk!W~%(Zbb6oW@H{T=60Lv*);& z=K{8Lp)G}*5h)M#VYvN6x!)Dy!#L}pH6A7puMnsE%lb7VbIafBe9>Z`(t}c+Gk!Y?;mLmdd{^%^M1$lX?b(gQ|)m z!6+RyUb=jQ(b}_3nuPICKW9q@lQ!asG^^|rn6|97yfQ~N_2~qXz*uR@gPGL8*><0c7&w!O{99U*Z4YL#=yPR*8?#f#QN5 z)8^4d>Yg%qhkJ_)=Y@NJ6|Qo93dM4$`~G0YJOuFkwHmc!%YA+Oya{sx5B*#er|Vlx zrE&utYtWJuCK4;tYca=ZSYA5saTEdW02joXJG#sdw1A~IKB-j5T>kyr!}yAO&Sppj zg0ogXW8LXQAe>WP?9Nx(KRZ}-0RGXf!z&<<(qTqih=M+ykQIdR3WAAERg-1xL2PT4I zTz4!3cEHxfFMree%5j^#jypXc^9@&)Dd^XPPB#AtK7e-Ja9;pTm>ic2U%0Oz zogb&L(^(?;3w(nk#9+NlB3vJB0S)7Zdl(}^a!M~rwk`}LhI`(##Hm7jK60MnbL&1f zsRO{HhoGcZ-R$h_0xXSZ4#-wc(TkAGLElxVVy=c!YR_ zP|FEgaOGIHQd;K@apZOhegbX@NtS1ot@e8Y1=`d)zWU8LV|V6lBbUNaO`%q`N(pTv zy}Gmg>YS75*dfWf15P>Z`oeiRN)&bL90))stq$JLC5I&n<%2GeWQI(r7^oy~&T+>( zWWsvkn?K%UkKUNZy~0%~UNc&`w?o*!CQmgEZ@l-KWPlgX1(yz&DTtV4vT`K0shtTr zQ)Q7+3^wXO4wLf$Xq#cLrh(`>4Kfkfpd6*3MxLtWn-`qw$V(i%Fxp)TV4wZ!R+ zYor5_`a@FEarJzvFD05}R2NvibKJsEcUA?QvEn%OR=t{2Tw}6Yw~{c6ANQwBQ+(`CHPEFmLT2Vl|#>z*C(T>QWxhNzgN3`;Nh>h*1 zvvDg-#!RIQdl_$(B(F1LRkQbVE!}>g|=)%4#K*;2Oa`2Pq|z@t=T%l=|xd z`M-1M`4Y}$_J&*#)KT#eUYqV&L(KL6jv{X{2r#R)rqZuM;?@TZb7Y@K-*l_LUT33Acw!}bkPdyH5%TAXieRKb5rI5xa9 zZWZPL;v&fJRBNPP4@d+-D^JOCXVLZf-Wca=Ri4%%LZA|Z688$f|E}V)lGgLW{Dhuh zys&le{~aGa2v*~t|C}GRg5dzg_u0BbTbwSp7V>esK4-D6OyRaJr@M~&+eXk#Q|W3x 
z$zGkH{zHzFE$QsnXdzp;~>m)>rVT7w+>!u7`u2 zgZe=e8+m*E!HLQ|{V;0kuUAxft`vQ}C!wG~lCN{w6_+qQvQ)q%L6VnP@~<=6yzBD) z6M5sB`+EZ)CDQ8VW!U8Zd;LEP{9kT?f+I&Y=nz(S>P7_RPg|^}amfLN}4PQ5oHp$j-)=GVID`BHz~0QHGkx zL`{U@Zy;zW@6^^f$S~{OFuqWlCP*tGA+c?g@1fQ0Wo4A#m+f7XlKL!Znitck&OcF$ zmmPLwqkJS#ZEC;0NniHNMjs9}?p=V|#MP5c`m26>Q;a@jO_DF2M zwDg@s+C(mH;5pP9x7DI}xMjEwQdSw$LO-BFDe)kU&$RwJIJvkMOBeK3N`o%#Sd9^R zqN1Y0gAi@hXw6vrdj+aqu>}%QxIGdockFwH2e{_9)p2x!2 zTW?9u;Z&ErY@)XXruu{L*l=dw%+ciQr1Qu^QIrKLEr&>q#d){Bd-v|P3|ACwP6w60 zKBoF4>Lb2f31x3D$x2zSUhf7q4W{wu&!5lm zJDcvRz=S1UEZ63b*r1u)~ZlE;V)cBYy1}-qXl;-Dr9hHPPFwLPc3QiRP5p1q_#> zr=ucg#QaA&WydczIy$%28jn*T)MFx9`q@NmEmlXTMw;L%trp-hF!wvr($aF*JHO9o zA|@mq2{)vg(?+$Ep_@p^NDUG zeq(z_IiL8`4NVB=Pjib&mA=i|q;FwqX@D3M%brWs{?x_a z13TT#wDjtWeVK8eeVI>=uO)>hf&#xk|W+lueBV^9;NA5KwIHd^k8H3^ge zgo!bQH265TXEUvWd7HrhhlAdZ1wU_ZUY}m^?wEI#SALGGltz8bEx)W66I*g8*Vx#= zz<2RhPDd}`H^2h3RpYrPSC2$a;@4{MYZE73_7H3!2(EVNi<_Sq%0A%VsM|U#I>^O_ z?=+fqM)|8>fL4qJTYnJ>RrfDc-qMS?jcLsH466TKphUNpdbv?*@%`v=*z+Q|ZlnO) zp6{9*5qv&aXUw4!l0gxYbL-RmXMUD-J79Ic4)O`VmABB9Y<@@nc}{fZYW;ECee z{1Y}4o%Ca-Idl7z#6t}zYa$Y}tmF~(l9dx-XXjJm<10#g?i>EL>Np!VaW%d3wRf{b z%3`QlOXRRbmak$_Q&STYtLkDxbXPj}QdJL5i}m*9#^?FfzdmXXc}_-n$ixM^!LxKQ zTdm;8ioY2b&5(@Va*3*+W3NM=6v=l}p*4kKDd?$!{eNg{IXWwrk z=4q+LIvRsZc&r>APb3;?$4o^nZ*HGWWW0pIY(2+&dSO1LG`Z}aK+X3L$Wm5B=wCX> zupq|Bqo^BoIepWmeZ}m&3_)`6eu=-oy?wjy812gDc4{v1Cos9lefid713Eg}5k*3~ z?hs|~bep;tudlE7VH$1j+A%JCUAuxADsub7y`_;X9An6wygZ&ze6f6UnNTZ*dPQM5 zc6%owp3Crgb%+>w(Ost&f4CSWis`d9jiOtBZNf!Q*|6wD8@F^n)>!oqtOr=gn-~ z#60QvrA-NKdG1wy8u@(+iSg+MFN1@F$v~3I(Kv7KYt~f0k1L|P>RuCn)|-g+ce}Uz3mc6*s&r+@Yf zD0kqnQ4sHniA2CmQ+&qKtSbOx^cWIMayq#o^DzHJomykE{$FHMmo|leM`&ctZ0v^P z>ttg)uAwq?8$b)4L`sSn8 zDQtWMsmz7!h;$h|aM?cFzA&Gi4W} ztXt4V@yVvQL3FoW+B2QoNtI^{1j8nLEG;bZqcc?AuYoZt%Z6C^ALXVg(LX+$9XEASNt5Nr}irwIdPQGz5O zOf-S1S`IdQg!KKp|0|(ZBip6D(A0%rt7B(puPdAuQcx`$+|I*$>R=QX6_vJcVth@v z1NJaiDh!x2doNdamasg%v_1sUt_2{?NE_tPKWk8RHNP)0IWf^k6KlDPdYmqmc_D7@L=AY2#PaAej%KZ$2ka#P5r4}H9XA|i(Xkoq=O02>Fdg7E&0zN(|N9tKJNCP8pf{~4El@=-zlaseEV5fD^gk)W~ zI&(dx)2N*u6R{uC+!l%6>X_xptx-X-F#IkT)`VhO6FLxnNx?lmJ)=?df+098_tJxo zVTfp(;c08Xqj8zjLPu`g)G2|@|9)$-P@U0WdwSS@)dw|^f4T7#d%>qgU4=uHBS|+Y ziy_*8>XVXPN=Q5Al; zR~hrNOM{1{Vj71N|MklvO-TE~uxOF{F#t<1& zQmm+Be8hTvAHXtMaq@RHuvq|3r^i?meI6}hYe{mK2Ff)C@{fQRJq_#LRX&Cc!k2&6 z8*IR@b>n57-u_|7j6k>&p&qyG9sJr7y`iO_(t-luJY1TuH?oVox@!cCvlHEAUg$$V zc6hExDBz`F7%wLSZLAcx#ei$oFgYAggLGOH5b`j?B*{8|d+&Lq_s?A?!NRlLk@zZU zvsWwY^~$5<)m8guZ-uv@o>t!s&egrZRid_h2R;MWYN%@Yu0qtnb(>}Y>gO&uGGQ7e z%v?r<7U+;pEdn`v%q4cT`aPp#%>Nc61cdsC#QU{Etz?Q6`>3Jopikj1ptAK%G{G_? zRfDM{Q>ZoI7Hg9Y7}!4P+13dFfD>{t%FN|E!eLDp;>1;(J@ZPo)6XGDCT?ykH*CA? 
z$8-~!wrD_v_sxG_uLtCd+5pX_;5I7zt8osQUIvg8gr)V@m6!Kh%U^34;Y89S#fz`b za7RIS;F2q-C`SWswM(pIZ$VavuFZ&Sa|7yPt8b^i;D~S8hVBQ?(O;e z=Z%2{K@OcK4JkN|)my~8(#{5I;R66mdoVg;8U-o&qKExN0%dhXgf@6sI;d$o%+Q+; zt!%1VTP9V*wHBEjbn`3T#h`X$`R^JF26m`pr#~PUXWR?}w7c~LzS$L4fGiv0M6UA4 zIiGa+RrzN%9IRFqb-7VEvBnCUbp%-_si3g=uFc5j4!A{H_jI)~;SKzJ4Iat+2UjUX zSbpob`MaikT1Y=@aBk-YrurO8WZ>b{uGlQ~9#gxYpq%~=ytpGSLkvi)^(hc)J+0Q* z0ik-{BLDnhNO6>(;=J9k)}_GD7tRt~RWZIMPEL!e8X;c~!HoqMO4F?!+uQC|2!8Dy zTMj*Cub(Lz*X#Anh9>_+im+~_j*bpQE%F_}>Hu5LXdLk7D(JDFE8HEnk)U!r?w8!D zdsALsUN$Y32%9zHLhg#8cCfZ>+aE)96?(V7u;m>7pM@b3oF**C3*V^~3Y!S&BhirY z{Lg`)>drt;sf~@Yk#A*;{#cac0IL?1m&=a2#%U~TBNu<+`-#WC%BVK_sYP-fbLiao zV^<0fh9oT&0{QEg_+za#vD3rJ*DVKmmj;0mRCRRnAsOK-{eHz56;>paZ(VX{w+~3S zPn!F?id)sNOydC8Sc}8B`Hl-_s5E$H54Dn)!4G)Ab>r01I~S9de%>L!{EX_A4>d?p3jv#*WL4>Zr7#ck08(kh>f2 zpYgYpE^z)a&SIKk{rYAfa&1+A-!B;f&KGfz|0L&agQ=E8m2UWKNAXY03)k+!?t)ph7?Wd4Oiauz=k`n&*lG23 zFgm8%!`=ONLT+_CCC`-oJng#UkDbnb*AbubDaL&LahhNrWSOyGbX(wI8K8>GjjBh= zhgOhIOZ)QnjQ;t6XgiHTAY?Aun53bfGe9;+k&E3>o5{CbDG_M|CN2S|Yr$cSc`q|c z=0eO+2FR>H%=jmxK{3I;Z1W%dT1x)_E?NyyG}f1#Pakz#5#q;XQk#l^}{BPb~>Su&9Gkw7j+n{g88;A`FQU69wXw>^tW9OJASg?x6`3aOkc&mSXLC-E zGodCT4;t>dv@nXJ;N97T`wb+)hu&$Ytfrg((~K)5G|BU<`Qll|fU~ zAe~4c%rfS6Z7F65Z2NZ~ZTST`l1FB=lEhm|4(n~I7n2VF@%*?ifAa*W>mRQ2yjRe3 znrL)j*@Nh==ctJwIZ`Z-tfL98X~wu~&RsNOuO8QOkF2^QiJFfeTcIqC-~^dUSsHYi zcA@*1*?*&<)?n?Y5=Yd{muHgrTJ z6VJ3d7xvuW?gx4=mUbBjJBHys4KW66Z8Zxf5SF-slCGQ*S8O4eQw)f_GN>$PfL19h~0abWmMWgbRfx+3#pED(hoXD z*)~D|3kC^wZr??tYWwd!j_xwRY<--aoh>0%H3LHyQ_{Z7=)6$8^HP_1+kD;bj;^7h z9}ZJ3fvT;5*nGunGaXsTflMsQUZWm5hFs;6bH4Y*aj$+97z)AEHV<^UU+pxrxtR`l@xg*_rwHFC{Py{o$=mLNrb};rzSUg9={1 zzFj`Zh3pK&XR0+;@lOoY_^g(;yC=h6{^n4h^0&X%;@4aU)f`FgJ@IbxPsVtYULd!* zBqw*PT&k_S8V1;4D+r)*7-C#hMWTzT>8qcv^5lpTeqYN{C7$kyFIn1JeJhuE?Nu9l zkc*Lc&z-M1#kDqTr=hSB7isvd6}gy&2SIaU&*;yA$olWekyh0jQ)QC*Q7FQ4Io@;h z)2B~n?R0QubqTnQ-e{Z;b!Qxl97$JC&#Rn8_U@e0_zeFv2?2Mz% zyq+w-#PK6U8`C&~UyG-hWSIZFHF>0l4mDANbh6HwL#5E}3Zv$&5veT`40F&#&sn3m+OlTkZ{50uB5mnv3iD48kc*|T zz7pvb?@*No`2|HQ%88#}WrKpK6=tZ*m3eRXR}*8{?^C%)gHE=@_-0LWS-uWSTT_NI z(g=jodfzcfWtV`PEw=lu+~YUNb-RV`RH<2EC0?D%r5@xFOFPAK`0E@Bl9Ex5Zt=fi zWV|7yV}pM02>oavJpIXGG@(P1d*a%RD%l5G0{NkM4hC8WsIjd;9saI&B-vc$al?28 zYyEKx)Pw?RLIdpX*Suk-%!w3@!nX@F4>IhYMMcW|!OY8p-EYk3Oj_Q{jV8JnT}4Nj zg&pcG5)$QQ3~`uV-7LGM)1!{o=-#>|@Z;Nu=UROvx4Y}M7z1|X>fCVEY6gC-9m0sB zoxcp%{yk|si-Q0y3^}wU4+E_SWbyyt5;C~|$%S$vSUa+0((h<})#ohj%=R+G$mdXe z-EpKFW;S$MLGKf-?^Zc4?0WrSH7L^B;-$ceezxvDyY|aL<0BX&c zZn+b8wJoR|=voVJAyV(lAD}FFL7Q?lf}}0rfXu{pDmTLx=xbdc-OmMz6Vmpzg>k3u zi+hhwz)5o>OhT&ABww4fr=|bR0vKaQTrL$$yHk-LlAA`u4-}VE0!g5uT}vGc!{>hq z1Ojx5B1uWN6Wia-OYITQZD0CG{<~<>e*wi8@ku zbNB1wr4Yu5gumxG3FqnET#4=?B{II9m*isfP^W^OhT>&MT=&ism;v5W+W#L%=_(M} z1O!N2r!`ZCG)^smR#orj`*WI%eYA0bomQ?wB;TTGu33?WaWUt1jh49R9{k!h%+TOp z@1U$FaDueeN&zoyjh|GVXhEfh;R?qe31Q?en&hD@*T3>Hg9b56D@WRW;Zmz`idcx0@k3A-Nt+*yTi)$t3T^LlaIbeuRLDwRILx#O2d2?Nt`~0&O4UT0VaKXg{!D zv0W}e@)OOTsKHZ2T_dQE5ac2u!=+%!ZkBpPbv_hd#mFr$Y5T`^^^#DNx^|^~-S9hg zagY1B2z0ZQBk+@d@e;=ds6O~%&qqS>PThL_6@T6n^(g)2Uc~2l35IwYM8^~fnG!n` zBNN)+wMV-|PEAb>-=a>9#5W+FJpMu;BN)*45RjdVm+f_BbkFF{)KqJ)nC5uy4vjYt z0E^*j@ZKpESzzz`rji`+dpDYF89PrMiC+UfT|B)BR@%qXV?0zBsWHpPMY0v{c_Ijg+)b0$33Y2s0{3XI}3%Kz57||56Pp1 z2FZjZ>$kCNFBHg++7Uv{%lWP?l>dzcd(SnMBZ)$x+eV&Fp+RJX0(K}H>i>mCMg(Oo zO70laFb=unbVhN(_S}SFeY)=12M!Lhd(yD}3+zl1fYGU&Xnu%f7fYtDl|ap(hbO)M z?@5q@etr`094FuFdZ4sy+_7Ar(rX0Se*3>CJ!9c`NhLGn*nlmp-Lm@WJN2^!V}*w78bzFq!Vt8ZMhzx1Hb=Aqgc3t>_5KV-gvuqZiGv&Z@m z=ghpkIH@qeR{p29$_SIgu8WtP1p?B%fvMwW2uwo^9XfFre zn*-o{Lm}pf%VkqZno`NZ(;h#+WDOfpN1-MtneAWd-CL~T|0CXOWl1j3hq(*@71Y>J 
zfqEfaPTo%J-h&r(;hb&yVR(ID64zpSS=3(XNcyiJ834EEEQM!&k^?0i1wP?gM88JG z8%g2TKZcrv1+-0|ZgmbdZ!J&PZ7s_+u%F8bg|ZLDzxZ!Q3g8B;P%n`nEN5l7h$Y9w z#=fXWrbd)_)ldE(?+Fkm`37{$#VNT{DUUrCQ#el9=U7fO%yyi(Yi*5&SU++UnW>GP z%UBXS4-4^=xX^PySM4HD6LoN-R(}b>y&&6Ii#ARG@+F~Oixd@1|9;=cqN^0`_&^ZM8Q)!$t)z9OjW(tn>+MS0S*q@_(9Heke+ zG2w+vWKWNd!h=3vX>d%}AD{no%s~pdgndg=j(1U_yitlXg0;H9> zf?;U;W_*HHEz~ZDV_cMaT-gL>mZ!Tdr~>r3kpV17kH2BvqzawLh^RM?aoLH$dtQW$ zqgyZTNm6n8OWxTGd^zd!t4r>U&{vqruk)8iiNG|+aKP^NZAd1bEPDFtwR?$>hK@yC z)@iQ*pPcek6oFdREGXBMA|VAGof2)VPGMfp{gQUw%9R7LzBeZqgLap;$4+&$dMfg$ zwR=Uo@btKy=VA=W&eTjbg4C`RDGv3%2K@WnTQ+gHkE&WZY^sG&?1UD&Pk35+de8mg zO3RKEN?)%OUoMQgK0jJF%4CdiPoBxEvX@4s@E|)tQ`mr5yB*mw*)o+hI8$fQJn0ng zl6TjRzw=tV+*Fcf?{394bepSuas{N|?pSNH;?DiMpOx7yx46A0znduV#bsn}KWUjnp~6;LWQ?Bp zSylLa&zChzww6{(H{ofCHU8l@Rw!|Z6w$J@L3YXR;Z$w3+Af3&IprFwYaQXUkhis^ zcUrwMI+qGJ*9zt)2!?g5Y9*ZsyGtMvr;3CeU4=Ti^Qi>sP9XsOYFH7b98D zeYW4~d95$P^Je!ByKkVO&1kLPE;L8B3xCI+b1y$~`g-HXkJxVUj3!xBEp*oAUR?^= zQl~1-^{AfYjs$O)y1Ir^g&dd*E8We=M@St)l~lkIV6iI6!GGD-sZ&cXy8G zgevAvz}5qHV;NIR;V6?DZT}ziO##!X9fHwnh0@I@qM#*dCCLdxBb9kGJz#anQT@_8 zf8>G2TLJ-54EfEVqwLnV#F1rzbY+C+Q>f0L=SLacikTXLvT_(c+@;+Im1d};lSw23 z9>P~PALCb0G9-iM0gKdgy=3l**8OwMOa0U{4&VLDHy=l+cug9ame+22nuR#$$h@CS zg6D8T9i|3~Z<`I(mLin?S= zczn;3DQ6mfuz3PI@?G(s+OpjC#W#0y1zXRBrFs4ce|ub%x(``6aKGdyQ@~ooWMv+7 zkQaacyl)5@JTx+&Bb}TTc}5|%jErcR)V!(V)-cJtyH@8pJYz)Szy{j=1Rz!~$9MRC zuRX0eelXv*Y>?6|&=hQSE)@*>#aCq&p5&r{k{)6B{qng2k9Esj`s`!#@bmxPQ_kLm z;>M-jPtBImA1voDj|fJEbokA?EHy7(PS;XOm#qzebU%M)qc>^E5~9d6K!dUJ@4~3z zimS#b#9-WQHblnFy3x97Xy7M8nHFSN$zD0c$mrI-fUmr&jZEC~u0Nt6*_jpTGY;7P zlcn7CL%O>{}P&&QbSWuytF_1r%?xmbRlCo20^c1hu6#!>el&OAI8q{ZgK z=jnJ3C}Z`M9BxiuobJwCg5Q}bDUVb|uU{Dol|gd{IxNPqNJ^V@%_(n1J7~9Fsa-XuZm)1XCyu?YrSGt(2}i%iOVz@ z$JspKJ6Y}Ty8_%C)vGC35dbwNoxYU_ITAml3DLNX*$|~=zDXg3yPymwLi@g{DW98s za#RM4I=ZrzUT>GXo+OLR&d%0Qix+T+!W+v%1kDAwxW@qR)EemNRs3oUJtBIXF+>!L zHsnTXjucyVs3u>}JTD>f{7&vIDV``9_c6sA^N%0Kq6@S*@;fg2-_RNYe8^7z__=bd zB^`8Xf{PMV)d)hZn(d!H`GWDb$@sP9o$bxo#Emg&C|3f=*=*S(`-OS~IT#n}*pPZN=W=+g4?HJCf|^}%MrNNQ=bqwEtOa*fQ-frC*q%}A#cC?)qF zpZOHx1DAdc)@=`V7#bPvz-+2fFN}f$I$dT&zm(pc!+(#*$La0*-Osg=ma>hH=<Kz%w_Pj7o;E3_PySuY}I+1}<44v=4k}YZsd>0el)%vl> z*S%`k-??gdo>lc(FPU#(rbRN=a!9-Wx|d#q^U# zAdNl#jaK?}F9sQ7r**gS+ZFI=iP>8!rS=_I)-s zHr5!bd@9hQOpcHDx~ms++>B>Hjueqrc4PTn#$dmOVi>-24`d(D{S(jG5SR&z4(@In0RqYf*y`0zAcd2Rs_aoWyX`dLkPUjMdPnVA~A zzkNqk$4cvI{zZlx%s4^S>g@~2Zlup!*NcyAMH1V-7sbx{xw~5eTrAqnHJFlI4(8Z*SaIP>ANbQJ{jVzYnV%$B)!JtGC{P^v~Z6j+RLkXl# zetUa+*+IUYD}5{K)DP#R&@1< zz$z+51{02YgWI>+WdcKgexeY*0%hiK=Uxv{rxQT!a_QAmEv{CMz$&NK`qn68w`|r{;GWPb-N}IAvkJJyRF0S$+rISLMUODh@IXW>8_Q zWPW#=z<~H1%uGGhck(_*lcVmtIi3_c6a5%-_Cvwflu~&GSq}{oqE~mfNXopw%WtC6 zi0_(MkXjLV3@RbRS|cn?QZ91ol&aQ9y7is#KDW-UPwnlf*j6vY$G0u06c3IKBK+x3 z96r(4)6-PZk8vAI=iRnytstdrwg?Lg-sJ8u&{8%%*<@}oVa-5=H}?Gk{b3%&)$J@fY8TbqK06~LE?ODdVp{F z?%0gkm^$Y4I4haAJ+_xI!#V*b`6RyD{*HH*-|qNu@7MIm$jIa2K)$r*=KZX2l!~oi ztw%Mw-@yQg-JpLykrNkrD1vhhmsR=UEKI(k1tMFsfXlmnt=^Y~vat!{*r;2#7e=~Zlcb1I}+4{e)@arlPj%_y-lg;RYlcRZ1XjY~;k zqoJW`o}Cr?H`+GgMB3vf<~aQ8$1pX`6=-mh0t1+tZV|09HNiX~NNV8*X+??^TEC0|4+ zAq*6lJHrZu=wUGBZDZrUL&4OxgXe=p9&tQSc=iM?U3{MaiYT$@IYcc+gLro3n-*W= z5ooBE&Xv)uJt7j>5Asou9kkW_wzTej-~l({7-Ex3@z59@;_^6c-O}vZDI&M>`UlMs zSs59RD^aA^MQmtR7TVRr!_3*a=u4CK#?*PiJeSb-$9XqC zhm1sbkd!m52OwpXrss1-n==Q#Eulf?zUitowX!78ninCtevOw3rni=N87sWbf2`p#L71NjHg=E5G-vCWFRob`1GPg;BCBpNd8 z@>taQ-?~0$UThR(SloJ(acQ=2}(LZoJ$r8_^vULh^32Pmf;RR$EfO_Pa1$=BrSZOVtyK^Xz8-Yetjk|B| zu-YFm2=r;m|Ju_t<$0S2nVbjRsDPj}!9KW2nJef9pfWy$zpBgDDhcE3=#qR-|juN4n4z898oL008f@9oK_CP&TU^p%6SCVxC 
z2PdZr>Y42Q<)uX>)(gsk6-UoT;$MF(SPI9#eyfT??!R!M`%NHBQ`!yAP;I?P6C`&@ z+6j9`X@3RcO?jP7;FLvlIjWsHPdm1IMCHjlzs_iyrDiX0ywKLW?>6$;dVa@U5=)LDKM@%k*l_r4>* zm+Gmo5_2eqMFp>!P2BK86AJ7$n{XdzV3g^^Bi+-F##SRwy?&?lc|XDokwUXTk8nb0 zBZLrMj*Svp8eU@$=z8^IRz6UO-dqp(Nbd+Wv!L513sUDui__EVnT0;9_6$r+sq=gl zrmTJ0m^Z#;pV7_UD`;gij^?U=qiq-(tlllX6-8?4T+QsJAVc7}+}9r@vxxfSdzUd^ z3d)~pVlZl|71|rDM6s5W)YZwAu z9Le?5bPLDJCzE}`l^xz4_M_KS-j=*%oW|+gDqL2bQBtxZ(bm=$yFciy1;P{oqfEZn z1=5J-dTid_-Y{E3^{K5b*$QrYndbe?ymM(Aplmvrf>lys!nrPg#ygz(A#7O(o+d6P z7KlMnwH95^-1I1IULC+hD0WRIijg(I{D*n zGb)4lGyD;=zoTuxmUG2Wysfi{8c+Snz-`11oqa6%!9>TPr+;iA-1hW-2Gf*PJI3KBXwAk+WDhCf)3T75=(&{=~_VvaC@z0Dy zCp-?>-~MD&{(!uTxbp6>kFetC6h#C*_M1FD`a`$IdnHp}Uw_n=<1%_}YP%M5Qv#R8 z>OMadT39H~dMNl*E*Qknk*N~diEg}G7!Fd-66Rk{JvMy=l&K^+W+RRwp3k~@ADx~W zMM9Trcj6&pum@l;H<7W%{uRtBYf>5#tg4-ln8TP=#5G(ZqJN!1Ao&_wkE-ekKN5Sy z?B?SzH8jLk^mR7g-gCf_T$A|N$HPH+2-&_%BeUE_{nS%wK^P$>*A@R-X~=sfH{Zd| zZd-ZWaSc~DI{IlW7Jpw$B(`Cs#)oq0FycDJ6_!Z1kVnVkdBUjq7yc*qB#WFf>mffS zF2=eBu_tWhZ}S=Fw?W6epK|8l;J9!u-r(a868n}DLYhb36>md_x^G4C!?ZBZiO|z0 zhhCnmS|f?pZT-5+*J#Su$W}fYf|WoVMHB23zMCWRm{qE^w4#d`SjckAizdJh)&Y6)51o#iEt# z#6`!?&(Cq}*hP`pvn`cWj+uQ<6ud`K@SgknNN_`-&u9Vj1f2?A4e`T~W+M1$`La`n zb>4HF5$zZXL87ucqVk6N02{fiD=fJwZEcgypX?lT4^7}FL`P1_I?~*sv)9%4?uWT% zgiwo<0NhjnsNG=%Bh`57Om@aEIkRjLce1`HJ8E8P#WEL#`3s-{>E~nhz=D7 zjL@pN4EUL^sXOibKRX-Ho)u_6%8S8BjgR<{4NpH>Q_3WRGNP zk>lEeDa;}=W!)qW87iJq7&m%#@yiX=vn%R?tQ%DqP856~ua-5`oKX|q-U#i;0P;TF zS|oPv+{4h&Q0GX0r4-9CqZ(Dr7wF7jD6g9r2TSdI0^q~OppmD-(%uA40-zXWBZEGC z`0(*2apJ#OfZ)!KQoGnHphgnn zwTKs-+rBw^h_5hEpfl^riPXv+yUdD&msY&u)LLO&*N`5s`C~o#_#CG2+&}mbmxypaD+-7I!~?`QjSSe7A%dg>c_y(c=E!HRQXn zoa13Q#G$h*u}Gwj1V#10t94;*GAcXk=1+Nu|3jpunwwhD0;*d56_+q>4NhE7o;)#u zDJ+omMp+^$|1==Zon2>V$$g|ND+seWT`fgovcAHn9J08cfH{^L&x zNBaUP?ukTp{-e0FHfa}s?K^#pxt*=Oy^^|C^a(@{q&@hL=oi>mpyfbjyeBQur>EY& z!5g@Y-6ddN6qz?Kf#nB-W|MRlI^NXP0yQY`1yqx+uboMf0NUerR zmFJX8`@z84Fq*Fyv(oM;J0$XGAt+e^bJNnff(u5MPq}w)dPN|Q#)P-O?UK|Pme3z?GcwSgY9kW! 
z&hl_g`qjY`np;}L6%}WDKy6$_TtXBXv8!Ni9_FAvLNQlzqfG5$#Dbn>@=8=#nysz1 zwO;E&EdJ-SXqN8o?jQ5>&5#vb>wXl?5;ig2JuDPBRBAVkvVDp9YjeHxMzC6EF$o~?@MSJMc zQ(EbM>~+EfgI}p<8#~+VS{{cGKYqyUITPDz+LvcYQKQm{hP|YV@ftLOo`(B6r-8L| zDaQ^?olQ>n}EEiWpru_3|-G?1#H*eN}MeFMkETQ{^T8WS6N1Ip>&PMDPF*k+( z9$DDAO{(v8tby|f_73@(oK+~!P|_Hc1NLKGVQ2Q2L{19X=6x+a+*-uOQ)<^Bsi7Ma ztn#X0!ED+zK>)rqXR6n$6;OKHql)MFq0Mdo*Tl*N=q-GfJY1RAKw7%k{U&(T%`k*W zq-Wx5)FJ~tJ@q;$)T#nF`aL%o0yTh#9WTy&W;!wHhQwsDgks)NPV=Z?+~cT1z_FdK zu;)0D9NvGU3+#^Lu(~lN1u24l*Q~d1Y^*fdgr1Q<@aDn+DvfS7f1#S1H9s94cK_Np zw*=2)$uzP*?c`w?_eoTgnXBuYJC~JtJ1&+#$Kde?C{ic#EK6t(mDus9mL;P;6%6L= zWP(kemPqZ|L`SL%4BxOY&~_YAS=;Y{e|-&`m`6BKc0=$)Y%5=C2Y+JQU3+`RbzdIf zr?|vKh4GAm!sEr^qoTo8Q?p%J{~FBy5Qu0pwhyP7?4D!AvF=aE;A1{;g84u_d6ggj z6l36cOqJR}*C58GGS!sW5b>KJLcpM&UGVUb5jcIC{o%tijhlg4M93}zfY)Ub5)z~j z@p0_n^n@9mHM$>JCqNO1;;=LL=ljb}`xq&YmLQ_oPo1zNQ!6yxp#fab-!!@Vltfqfw}9Yk*x>MNb8s zV-_GPb8~ZRAe;>i4RyB9#GD~ljc(7M?#(mYB!TCeH?@y8V;@=RvC&n5LqY)nl05i< z1gmLiXoz0A6`Pn(;8izN2oJ~`Nf!ES6nU-Lj!W3@SSOo%XLIqicU@A)c*)N8+_g*Oe^g#Mex$K%@iFd}T-){s37yWE z_(J8I_rZvzb;ZYzA46`%t|XJa+Z>(#Xh285&RJKy`#7V$7K8n%5mxukTQ--guuNWux?Sb<_BjAol6F zD7VdGTw|BO9MojGrX~OR@QuFGU7fqGe(A0Kix68k%PdoGk=RS;;vX?nhovQnNrF*c z-LT>l6A5k(mwAxwq*!9E*IyT)40A|$=%C-{+V&MXUN08tl~2!i$9A$gB4BuEXp2;t zS7&eli2#cDs1YgX`y$iuvlouF=arh29e1a;Rm?518{__HF)}D4=0;C zT3lNCabaQM9a+g+cd6s1XJ*bO{o)VBb7EHfLikWbYTQRZ+{b(33|2=~-W=eQrdgvw zo1fu~@HfU*oZ^XM@-jS&Og2zcQ#(-q0x?2AMvcg(+6SeY1=&DrGLb*u7V2EvaLK8X z43j-!X^$Q;_q^!+9CkLi8U#+(%|pPeS5C611y_g5SRGsFO_lf>*fHfCm*QUMmn10G z>&6Y)Jbjc|(8P;@1>xb*K0w92b9!_9Tr?Ven_y-> zgraP3y-=(CnQ!ooRE3}RZnV`AB zf;K292moC2+bfZ}9ubGPLZ;k0iIRT+U1KT3zWgI9Idb1)xe#(_^UK)yH+Gx;=qW>( z%=i*6ANnI$US1yLE)!KFqMG3X{m|I$kgA<^UPS7?g~YtPyn*tqu#Xcvo8*8?f~=Kq z$*Xm$5bYn=MG(I9B(_dH{culALIPye)oYrXqWjDUCb4{?FD^~jj!*7y3QGgxFoROF zZpr9OLXs{~rA6Up>6)EJH!}k*k}(8%U?1DN z3FqKO@M#D)@1WjG-~3M>J{h6Z)z`Or9AJkcvQZKk`N9BJmFlDQWbUPjM*Uk zTM^|>Ti;GAA56^fQkUhbg)fv!g{A#4>chM+AC!g?9QX}IDT<3b4Hfj2+Sw1_nv#B= zfOO=Nd~);H*qGC$iHr<^9KB-N+}vDxzCu$DdXK{?c#2alW*gAekJ7A>0YVA!Ay-=|<=EW_B9ZqHZICE@+YFI8{)M}>=xLjUxPn4B z;0LNM5TJN^+FA=@iJjAr8nv~x>6058US41JIF7J z>8pz#C>A+QHh0L#$QY;3Y^Xnil;g{>)}kANtYE^b&%-u)Yz;=lgQw}k=!2=x#eZ^A z2#sQA&L)O^{i?@s_;Am}57ML;ZGH4!WI007xu;DBe+Q}HNBv=t&k8yw8ALF@SyY7#IMu;E_hxRL4!9McobhB&rFZw})%FNnrK}z1VpEo`i4obw1 zaFX{^kjfC7n^OjA+a6o?zr*MRm~xohX#7V)H?ju4e{Y!F*p@wGevD>#=mO`#HV}lC zj~^KT;eb7tQ&OsC+01AjR_T8$q%t$Le4Ffwr0A*MXE7BKkD3y#EHA&f_-onx>G_@d z%Utek+7yH;oljD}o7awTL*k06;oOGF>haJ59Nx#;kkgR>BJf8{(xE|Fam&%HeUCG_ZM(zj$QkSD>FLi%9VMX_u_7bALo!Kz zTN&CTN9W$3B;RDhg@Q-@)mdR*J?Ir*6SlK;DJwIxeED~X^M1Y2%<5>1j9YKs92ZH$ zGB7a-Jvb#pVfAaVSpWP~(=A%N8xVtm>CbNk_AV0)UL!Osjy!Ste#4{j*|od#3&xL7 zYYG5?A>?-4&p*y~d|Krd6%}rK@H?5r zW0@s8qBv(2Rn?W(uFP1;g$%P@IlknV0}%(zmP|%EFm?L~dPpY~OY6#b_C2;OnMq`m z1LqjP>=9b~dooL@ezjP3XFyB9BcinQ;K;>DfOw#t{rz0fmVEQvA<8ySI|~kS+k#ek zdL9@#u%a0yCo&y-r|{`k%N2;w=&c)k`TSVB=|wS%i(IVT@_$w8 zX8JM@xVZy_toBs{I=rw8`Mc8rytom{#VW`JK6D+RE=nIsG24Y4@}rJbe>U^G;Gp1$71j-gm@BFLveTKfkU-3*U0f0!b=dCLGDr=lwWX!IAHs7Zr8>U zD2}ip>WEJ%GgW?*2ISa1GLrn{$&*`4oeZJ3A5RMmy4e-P_16WiFan!JUQ?dSxPb_n zOG=7DPzFHc%y>5U7F)OcI&K5AkF_2_(=ca2IQ*cF5hiK8(~kYRe|OffurLdwz^OC( z($~6?hLD(@olQCLx~(`p*+{Yx;4`(96>~Q?XpTGiA`{{cvzrEGMvz0qiQq$;k*ZWd zVU&9&mYCeFFONEdR(}OHMo-R|X8@YSy0nZrn2`q-cM4clQepx0rOZ8%g~ku_$E z`8EIZXJwsCRQtF$bh>_D>C)Wmd;R*g)XgS4`i0#xzNB%8W-hz9FhF(y6gAqxLe6+! 
z(s+P1AM4koK;-4daIkEn|5*#j9aUFXRfWEjOJEX=;{RH0<9gD}@ajto*ak>W7T&ZK zDma=>c4u~Wc08U*sK7pe_bLnP*C@cr5Yq5Nav=zjU8~8lm+&B0=Dj#wszUFU-N>AG z`WIKHO48A*7JZH6{Zw`me97Jsr|9D@FU*3)RQjdPPY8k!YKyLyQtt_rG(-3(2rp)Q z#~QZhx2-N+8i;}T#%RCZlB8i(IGtkj@85yI35^jV$U#6LQp|9W2X?R6kX#Pc*A__h zy3@t9ED(J|dGXYf$Ekn^5Tr$pSHuelmFim|;IA8~Z?W*}uH7QE36rS&H0%Vq{E&x_ zKnoBJT7rxp=kUT= zvdH51v4JV%B6%M7e0Dw6V4nX{ef`?2Mpb@6ItqX0;~ch>8tS4pH`~&5@;A6!&N|RE;Ma!%9w^ zkaXZ2eC{g1m%K9ndjIKz1Ao}?)^!y)o@JK!=10;V~A6)G_+aE0CPuAZH z-2lgkoRh<&^ju}}9Pi85g{k6LmHLeT@9~!9S*` zPL`0cI1f{Uic6d>d(1r_12-m?*7W{8&5;KT(j8Y8jdbr$A^~*sC$9?$sZgON^C&<# zCyVpiy$aN8{6&Z753ZBT(!|=L)b|C<=8?gr91fmuLtxXF@@s2{vcPItE5{ZsM)-3n)^IN=peEN!6w_FDX--;I8O{ct5JCzfx?1nQLKouJB zJF3u!2dj>jA(EnyRRTo&ZOus6#*plNur&p+hJQb7(hX85v;+qmPP2E0CyH@AZd4q5 zSL7w4Sqj)W?(z98UB1i5qvU4QPa*AgC|%>cq2%1V#x~D`PdELTaEFUOHZ$fW51sso zyvItK*B@rez}c+ ze3{6hWiIMPemhQ&PrnyqG@rqF4+cSf^`DfH1=~UiIs@f$7D-Xmd-wK6UOn>rW;s~Y z{R)g5B!O;8y~6I7-@ku%Bl@bBI9VcArWG`hd^~;}O`f4N^D(Yl!LbRku}P7@_I7ip znweN+94mCnWXw^pEw^v!N(-|+bgM0X*!JK&--0W(ECVii=eNC5yrFexJ8c+mQZRRk z+9c6*bLmw<1N|_im3MjL?w0R8Yrk)<5K8edw9rUAJXR+JF<5j=%nML?NNP7L7`u(J zr4Xq|e0{At%5mi*-mnSa(a@Zs7QF5kTi5jCVTd{jQA zbm^W;%)!V_{NFNqpmZn{;UZJwk`z_aEF~%;^}oo3xWl^Uozusp{v^#bU^K+o;X18KPYPKkW27?T6%y?eDPdDI@^O&R8T!~ zLM&|*UEz5)f()LBWnu01L{W<6%>cC=h|BjO%i1(8vq9G#Jjsg=6=VJ!oDbDHH{sT} zv!1awOB~^*p->6cuw@y1TlN@o9?-sfB_M$7jaAYHoO+cPDJe2%rG}+c18T>8A+Q^P ziG4>Jc1xf6sMLS{jWX7yR(x9d6%`8sL>$oCC>%ZfAl)mze71sWQNO}D|882>)Eic_ zY(9!woO5$u*`*OL5Xz7tl$kI$cN;s;(qb!AD({56fs9)g&l{g~z}TA3kQz;gXOQIu zTbrtiSiw4_q3XQrHB{ppKDY9_lT+J+*HRF|aA@~^2aGjP%iOz=f4on2da;J4R3=Nw zy+IO&Eu02ah197OkQv(N8u{;Zb+-2fRq_#kzJ4S$4VetBt*^32lZvrE(1HMr2PFt zrK~Z?#~^D8^B71VmfO^C;vSXNOFz88i1qN}07!Q1`-ytQdYH)?W%>@&qc5mQczaG^W1&5xx0|MnYz zWOL=SS0zqtzg(UM-y#!dKc6py?5O(vBK@j4w3?Rt=SP)B?m|{(NqywTUPkV?^8L)r z=5p&=aOY5urQ~({afUjhz^E_lk20XVP`C)Gw?(G_^LQ1M$xeTa#Pf!E~dV|d7= zt$VDUOisG^M>ZPgHvt~G&J8|fU!p!gr*@qUjb}M=?X-jEL7%UlhzsTOky-8OTYN#& z`4=I9SP7+4hvEuAi0WYsp#d;h@L9><--`;Bo>3e$$*CjWy5kSfea~m@=hxgb=4)R` zVdfUaW|Anqk@mskla}t95PjZfB5^92=Qx4^GeNYM3uID!>R>wn`d4#do<-qo6eLIsB3 zXa#`9f8`~V5?gcaQ|2~^7u5LSh-+WS!3TYiK3A-e;54^a2-dl^8q zxZ~e#AwD#Mnf2u{?eRPk=8hbMeSbl!mH%+yl(A!O=z0tUI6UN}We5U7Lqh@R5zY)X zp7UeV;?trdun)T3ltlJb`cM+SQozjC`DReSzqv$gEic;^mvdqGnTjU$=SKTBrd`t~WT?x57tSrf zEULqC1s)0Z+;B`|-yCz_ZEh;phIe#C)SeljCMhAW$@?k&)6R47cNSC|Efj7l-X*GH z>V{{99v`}oTwB18Gl^Yb6gZGr?(Xj1sAK+BDvCv;V)h9&1P@T`GqHB;ZVI&=gPqdg zjIAe9=n+ZUSH{Dk`8Z^li9`pU>B0f6rdgrb*c%uW(;i7NW4e@atjsBsf?$GIf#kQ+ zxzZR)QWz3bCsWjI4SLd!4!NoaUiK+t3(d`q&BIya%4mAh)RZ~Z3vZP6zgmC~BJdUV zO!N1@8N?;i;=6>!1{1AklT_zPny;uKU!uY@C?HN$7_2ZT^#*UYA2ws;y!qM&7b2VM zI`FUj&Thl~_ep&ILK=jDi_(KtQItv^hgR3uD{G156(JK+#vVgjU8-ao0^*RtMRTkL zN1$}<$brONLmJNrmN6Ib0?VXOg87sZLgoLfzbRXWd1v*JCL=k%O)eoZv&0kj9nzZW z!RsVI7f`svfV)jW$cAC&4{Nn(n+=4IbISU{XZ-R}?0`ts{ zoA*_(lKEF+@ZeN*g?YO@8=(ssq_oq-SuI^&Utjk+bDp=~+Bkl#xxVvOY^+HH;Gr{t zp$0TdCI~~sY?SLtp8s7**N=A$Afw#(%<20E6k$JNXwO!7}&37BWLmCp{C`eBCoUm-abnQDWo&5ewC%mlSIbt1dlBpWG= z$tRNMjjtpm_H2MoAi^1P4Tf8C{44FddgQ~gmSqNH^9ZgDa|#YGlaHwLC^@F>u?W3D zTO<$k55&ZK2o)@Ei{e=2D9QutnDM1oF@IEX@)h(>x=^HPcNP*E;w&^SUlbBCi${Rb zg|d~^jScVg@t!%VMI|JM;?B1kUn$jaXJ#dDT7(W+zEtftar0>>6v6qws;U~wo88JH z)IV|<-Dz~VOtvXAC?(8<%WkZSMOQ()AtwQ%P9^gePD`3EnG=p(5aAZo?FUFXga2S= zo`cBY|JoRf%Nrx%;k`gtHcYMjF272>xD3!mZRO)pDCx4m@7?Q+r6eh`D|uGC4Yh~A z{oeY?Dw9mfYt}@XqxLZXdwx*FkuP7+;40-)O?5tvpj!E3F-Ee@gmf=eoR7P^tgY!6 zfzhz#l|?R$o59{gl>GlnC)0X6_K=q0Z(nPoEj_6Wj?Kt1Q!JA7lZ4=z2`6Bi6qq&t zJV(pZx)^XB;M7SE9CMncrzLVPWr#77ULs*M0v|e?Y6zdK4p7`#s-ZcQ3#Bh7;IKZ` 
zl-rWcA3xjy77~g%jvu=kxXVl6#HjTS3|v~Q;j-lLBRl&+X7K>6P(g1i<#Exj0x(tz#VPYRVoN0 z8dM}s#0xzhF}qOGp25#s!Gy@4S}({quUQjE=;oCGS+niiw~vd5M<<^!vA}j5na6k> z#SQ1=@O6v_UH=`BNO4z<5(QRJhjcUEOQ?VjXjarbRkMtd3BFYU^K`Ur8~3z*2-&KW zDd_g`^BZ>LYBkR+`CVUL+?NRDPG~7mX6D#Rd(SFnQwrI8;X8YNKAOnsHrdll(s#3t z)i0c?uC7KWeSro*9eGL$>#0+x2$lCQ$)u1g{i$ap*qNFp&v~k!bcOYc2e{i}N1JA$Pl`=mV^c%6K3n_&ZzYhcH#55;+L_1xj96WtOyz>~*vHzh-A8h&q@C#yr84I`t^*7%r7eZX z7HYOR1EqenY3?qeDdB~+-ywn(&QE+%90x5D+=%QD`yjGOOGWUFht(XW7wB4=uR44n zQLzg6O9S(-u`Jqvp)KDbc$ay~=ldCOv#hESw9xXfSl|OR4q;!WuI~P;1MBsW36a5F z2)3rregI(Pb8E07E@sq^tcPy137Wy?pjb~Mw~KSVCc}~lj60k%lr|wa7Pa8O=w(Dh zwD^yxffn)67?DEBn`b}-ta5Kb_v7z(3V$A)P3v?v`6;AO;$Z;yVFX_yDBvuLCuGMR$spp z?wq-9xcJ-20xZXK=n_KniG+?7W2EuT&icR`VVgZJL`jj>j-hTJe$r(SzqA4@9yyez zQ8gyrD`H1XPo0v3t)swgV-C*G%#6-PZ?&4(8Dh{nFDO!kt`*&LmzM&pphsE;439|E z#aUlLkNAScy>;XuM1A!@SyqA;f|81)lyiS&b0S0HHwrG!)jauFlZ z;z4?UcAw^1%jcptaPbMGM!o&}Zr zB`jk{1SyD-9gv`n0bdB|17)U-;*2I%?Lj&_qHFa~YxsMr(;@Stw{KI4Mx8`}!lTY^ z0F%G;^XpSh7)38ds2CE>%E>`1@aQNNU*0{TiL)iYzm8|Ju@z^7Zr>848dInWOdXaP=jWS zt6=}tJ(zJH1FdPwxaj9k+TA!sO-LNfWKGm3qIN{j{A)bMBj`O_ajz4UmKC0s@VN2* z2v(988KHU}W1q)E-RO2;*u5zI?BB@Td2=W&_WG^<*Y(DRfuCnprhh1BeDdx^)^R*7 z`_n#MYlO)PVsNDZ2xLc5I$B={9?&Jrc-CQ29ohayIe5CCcCddPxevOlQgvOS^fj#E zlKIz>snt8h9Z+BnK1VTYttP!jAnGKIU=De%!2v3AQuGwkPapOP@*47C)S^O`f@bXo z7>1kn38k6oso8|#ElOOzcy36lt@!>$w9h{tw8-$EUcfT?>w}dbYug+tJR#3Sa`Jd+ zUwB#EW84e*Lxt1^-q3P(p3TlI@ud@l^rP8Rs4s+3p?5wk9_l!w|tIjKb}QqL3e%Qn@6WGRjLEzX!>%}gSN^Pi>yrJc9b*Vr8D zY2dJ4dZGSCeAm>tby!Dw$Ha5@5e=xxXz|=suo2sbh`HC!De~=JSNlcbX0;Dd=6)0h z+!pZ|lxRS9Y5_ToI+IV(0rSSQJ+S+=#0=-&_7X1*4UcP4r>fAEOA2p!jXa3FPw=op!o>Gek*uv zf~4G8F-oQVB|uv5%%*WQ=6KRe{`sM>bGHh*uiHUwk}XW2A;+QC=1^{zeMrc`HwUwZ zJEWO`&#fXqQ#kXld{4J3RvhO2oxh*HEQTDUwRQ;Le?H|$;HCVuN9(ZqUN|}AI6Iwd zkHX;}s3-t{hw_@|&{<{nGY)!>rQ(i^`SQM6h7LY)k4Q82iQ{@_Qf-JQ+KAu32gN|A z8jYY2AOdrM8-Hn{0Bb{a$OU+?E1c1Jfk}wC05t4jT2&zE&i8{^I*XR)&$zC!jDR3(Og29mz?24U9nhWQL13FMKygd zx_r>Xux-|kshG{1TGFsHO>APk8Bke$YQBDBnDD*IS+m^f`vN%99LQ`!peuBDZ2!6a za^wQl4jmkB(+XlBjbi%;6*q1H~ef?rQ(@I^tnUtf^kx_PK#Sy1^Q{o z23>4yMuKe3?N~K|;*g6p-3z|Ft7=q)oNx4`aU^W}N1EvonnP)QeSHhcNh{m$JfFVO zjbFe_dh6;Pq#{^6NCdx_=tMU7zDxx~Xb}Dm?#-AmM*;#|ZZH2<10pMs*<(T($%#;9 zgLX~>e%wDc)*Pim)u69)rNoKJNOG#a{FW6YW`9^%kR#>P&oKmHMA;EO8K!EOLPfDA z?B6sTyBm~T0Pj^tjrU@T;896TK;O>(C&$(o$^m@SU>E|Y_T&E?I6wcCI^nJZ=Kj!2 z$7E1u-B$%QO)bzFVbnU$A|#Z2Qqmn-dXFK)AWw-Vh98iCi3GAuMI2NIlL>S&k(S>q zwer~dLyw(}D_83`clHLBnX;354@dlpLsi zhOw#X3LmLGixTChg)GlJM6`el{QJ>D#e=SylN;yET{*QMPWh_q?$5wLVL`b5((VXq zcWbXYtbVNsGjD``-Ug^ElReLVYsj~Qq|Ahtu|o@}+vNE^*5|^{Q_$C|liRh=3cdgO z?c47hz~a#V`4oLWcrD1ldNTM->?gG7=q5(*L*SrMz?NbFY#9ub2c)$&a;o=_6m(^B zYCqekDc8R`pct9cE=ic%s;AVF#Ywk}wBr|>=@;Y}giA_G|5{*ByW6^CN#j~H*Us%% z5H0=)$+W;$$cJp7uz!L1m1YLX(kR?T>Aqo5yZQo&3S&nYgE^=(J9U@RY=OqwH z!=pzxKpvN`e5DKg^`dQ8l7*(us8?Z_yzUh6rjS<|<(j^m@~zT=-gHe#LXZMHgDIcZ z<&KQQbyNj(uU@|nGcy0$5x~5-?7^@aH+Gq_p!Fyn>VrC(QTCF~Wu!r!UVtb5T>}Lv z7j|ZBuWNXx&BErZe@=LK*T!~zwVU~x>9pGcU0sSq35i&Wfz&&%%^YrVC3;egx8jqI zI*Wb7U;Z?0C&$YjYm%VPU+cX5USIKBTTS`PtK%&%V)#mk$BLo_evdK;!qpb!6qzMY zpU(P!t=|Z4*lxS0pI@8r?mHCRF!hkQvNXLnGFD%Wj%WD&P{DY_E#b1l0J12y7=1`| zNNT1fwc(JHTToEuphMajZdBWHz|0qWMrmcqwXh~1yyR+&p5~|npadlOJH!O7hUij~O z109ur922|-xtP^g??5y6Id+9Ia&j$kj}X3;S3gvAe*plt`Tawcplao~G&G*9*~vBC z)c!VzGxn3MQKl!1)(g>R^ohR*h0Qw1$fQ&5eWo!pAPsEQD{FLyH|$NWfyh+8lV`EgPB8TNPk zVXjhq-}ht0H`-9JAEPv@Bs_Bs8!S2`Gc!6fGowgJYTE3%*RWcC?d5M3nV5%fDy7#T zM~M!hKV+oQK)`*bJ(yczs4}La`%zzCKg_mgSOn`uvE4CxE zs0vyKaf}C$hZvOzOx`4VuZUsh2u$X^lHt&Jk^S9DSrvarXSRFMyq3i4i7jog5J-PNqhgknlbtJ9*MZ 
zt%sG596$a$8oGgVPxkt}oRw~{XLxlK3e`LVPxZvjvLGbYI08bt4S7{dtT#GnSDg?zYcMaf?!`+^vD*}YP_*6>gHMgrNQlarB(#Efw2 zZhX-@IF)ACqJYpp^-&`7wbR22ba>p%%E(C*GOjILWYAO-q+Dgefg@uK=M_T{r zri%R9->GGh0kfj+WZZMQ?jL1hCm_RoQe_gx9d0JU^xuG=q^8JQMcR#NTqm zg)KopQVd>IYFm?e;NU3KL~j3n!vv;!$K~nQAnVMM!t$4!Zca{EnD6;Za4;X0iVLE= zfAzuu^g_*_XzlN7zCUtLC$q#Kc0sAgDJV^TvhA(dZwpVHw_$OSyW!UJB3Yx%2RS+O z_(Te9&<7)%QzUhS-&L4Z(RBg3thek^bwdZ2 z_w9bphbfiPzK|HI!3`C_A`A?^vZt66Pap*U!8$6O)*uWznS+5E{8fFsBY?AJe1`4I zc3$^Om?BOP>*HOACs1}39T-JoDCU+W^{2Cpo!Vsp_+=%&4^*{$~<8?;+G&ybZrGu$dDqL$e0>lKGV6H~SUqqj7_ zxhNLycDkj*7LX#Dp%2xj_47GK{FMZ(uIC|+5953|{HV~ivWex3y3eZi^w{jP5Ib^@WlAIk&8;m%0yX*Mck!8Gu%FCjRo+SV zP4zEDc5W8YTTwTCu=e^O+L-3>?P;3G<)POY7z%reQ|>hL!O7O~fl{$#I|Lks3?Cog zF#L@o(Lx(~APQk9C{h0OsJs3kUX3H!tY^d!JO%}#ttBpp=uU#iFiOpUpM~l4G=K?m z2T3??|1oyfCDNyk{=SJ8H&*H7mMMe{G4l=EZYsVvelYKnn8WhF-l$jUs1$^kc+}Bv z!q7-CRU|njv2gL3#~_esmF(eE8_@<^$7cg`=pGHZ-BZ_5Uwn|%pXRRZ%__jei;YK_ zbfPZ{LW6mF5sCc*&J%444L{jaVd-q!H|MvPU38tVwt#-o$+A@Ct*rM$a?LSf_zsbW z)7qzdU|Fs4vv0;}we@~`D8k=YC?8=7D_|-vs<#aq7?U2=p6sb@ zZEdw7sn8rkZ%J9WYVjJL`o%YcCEel4LdcY=h*&3h!YVnbLuY!b`$`KE?9fYa3f4*ZY!(AUmk%&I5Omfu>wT(CZdg-U5U&$L5%QjNjEZAh{0R6^-#co%d%pNG9i)6nqX9!iEU)G=>7^RgVZ z05yi&_Bg*g-W9DJv?^guSo`01kEAP1p@M%?3GSpYeFH;Be*wWc@afvhJ<hw>5| zwoyS*aXo~1;>wz1_`0JK6C-22SVv{_taLs!jdV9RlUWnA>e*BAclS3`fLb*SfK+`- zwGoWF%5xLS+V?VJjoN&4-0Cavl7B4y|lS*uM1$JkuLg9uUqMD2ejO!}k(5j~V5)feMC8FN%n9~KZ_MxmL2<=a zaPU$ncVEL+pA0(HW2@T9_6qc5H>j{1uZT^}ul(WST!_;4mR3K6j-Nj3Ll(0@otj}C zL-Q57MHURlmiws+=<(~|u6eQvo|{rwQ#_2A7R(=~M}(^2W`$SZTy|wsZ7O1ADCMbY z#BZ0r_^hd}8t&j&l>H7@tH4(S5qh#{$6HQq)#JV5g;Tf1vIdu}P&+`|@E-xz>&yWpkLAMx8jUL@* z`T{3*`BKW4-OV;llcsBtuHPyrP{iw*Ei3{Tm()@IqW<68(qEBKqZaM?8DU8Qp!oqJ zKc|G_wkd9TwL|4K)?x=Lbi`(eB0E!1jWBpjUELbb#C~&1eIZaYtz_o6&tvz8BG}cP zg>c({cssxk1ZnP0skWTrXZTV^Y--&gS6Xc+TPg^493d=kpRyY&YR;)iH6Tv;`RI2- z4@Dy$#7PP(d-iRbz%+xJ@FkI7VI7b6Bi-$(AA2VZ5^qIZb`!QF=eVg4dr zDp!C{ zqTe=pY9e)5YNir)@_Ew5M;GeG_V0b?S~+CT;Er+613Uzb<8>#e55DY3*Wvx!-FHIG zdsvBr+LfqRQ;HmlYIwSX%nJgTt-zZ~`+N z$xjpamlm??=pb!N_teOr9$BDj4p8<)jQ-zH! z=noot(h8h25Lqf=MpkKK?jS68)Vkc2TCHTCjH=KV$VoDw(C{phXobc2dPonELJ z@ClqYxl_O6xu*L0Arfh^LbOgpRBfCprKM%hsz($~ATA{d!QFv8P69Z8PA>cV&wHrBBZ2hlgJvLF$$yU^VN{&A z^D}J-8#I2CSxN z&OGY9Y)S$j{QtI~sf4RM!LZ#r=5ZR7oYy*()!*?<+Id-7*?iIf(UJorxm#Rh^zQBJ z2D>2^3b!5j;cDzBN+c@MSl;TJm`X(MHMoy)vQpYsu;E_l|1)iHDGy7QqHCsHD~b~y zb5ZhA$f%S5w{ZdidS~Qz%NiALn~)v>DG{odSpw@W6we9JhM=b09zi@>A;GZ$3`~T) zm~XbIZl7h?zopIx_w;Kj0g=MwMM9t$Zeet@z7D=f`A;fydBMfBEFG~B|x z;H*Il-@`Cp8T*fSAjq(^vbs71KgCHv^njE5W%f1kb{bp+J-E9dE8O}Lo7w<9xUNv5 z^ZjzdXyOkIm5|NnFjg8pJyHT7#H_Nad$pacmB5{X?E#PtkBUHX&t?2)OVyt$6a`;d zoawM;aQ`<^?~K)?iott;RbyiX001D2LLR;V9#JY$-tkQ*YpKbj7KhXD1YMX?0pHPj zf3B6Q^+d~E-JZ|={TG8ku{g_H@CrwcELSQRSHmx$+4tCfy)XPn5iJ4KSvbprK3+xj5kv|pK!anlJ0jQ)E*;iQk!Mv2QG5x! 
z5Z%Ri6c4%W>7QXo|J(=0wgYqXPq^UG6(E`uu-Aa0`l;b50yoe|pP?nyj41f?v9uA9 z^~wutvsc#7!7W@Ua%w-}mJ9Vy-QDEK;xHh-csLNAVZ~{wK437BSY93`ght0GAyPxq zZIAimdA%)}HK@j1O4Tp)^h*_a_H@hT+BJPx!r%AngHfq4s`&z*W{JfP}PM z7Db{2U-nf>MjK_Z&*()-cZ8Km#NDguWUZ-`o|(oV6hNq0+a++LTdzjDPefl0ez+>p z-eQT+B+82&YduY&aoerB1M}IAL9!?12T+roB~B{s(@VQIss|3mX=9sBdEqV;sI@zr znj>3tV7s(;<~cPD^9bk$jt_`o?ka=di+*}@-Bq2(M@9}}EZ~ZkGwlCw;R7P| zpJCzh>LMpyvFFxd1wmR(2-18fPKbT*#-U~tQht7Zb$sMt1ca#_J(l2|V8sB6$Z?&ah`z+A=O?i;Asc>NHtvc+s)#WV7mj6LA>xdA zgCG@Xx!_)pL=8b7{GQAVf^A|f6Az* zU{+wq5AK^SDRM1swJYob;3NsJtIL@B9qw2bDt~X^HcUKBRM)x>nu8$Ita<>J$SLi< zG`V5Vut#uma_$X)-Ww^u=S_g{YQNrz3z`Vw5_}2jpv%4A+BrDr{dMv#b=qb#5ZoX* zQ}8OtO+9J?Vb08)g2|?^J0LVwq;5t@0D_jEezk^d^R)=_=dHwa)APR4fw@X%nzM&w zG*Po01^!InqQeKhCCq@J(|h08Fx7xXJDZFN4(##V7rRn%H>-CL#HMz4Yj*EVRR*mP z`Du1a=V*^5vA^!DKLTE#SZU5$m!hhqv;pow)+1Y(0|HpkSsUjnOB7gj`6h9FraY)H z{O^O~#SyLN%E$ryTwERvT(WWQ4Ewo77MZn zR6_u}Z~!rqsW~FPU|#_IyO3FOrY_Wq8dMC<0~rR$6_i!AF0_;}qXpOVJsy~=Ns8Fb z`*-ltIS3(KRgaZxP3{>%_I~L4%iK`FPR~TJu-$iHyZ@RRxM;)u&RdEUj8pZ16&WKZ z1jY!WUI3Om=V+^yfYV!FCM`1~v!5!unqcNP(j5YC&mNSwfcr5X^?LsOfDbDnkL082 zvj`6$3581B3IAC{ivi5(L4}{q5{qYIz(jHuJJo#fO}fagC+J+3Fj~iqka7XIa6S!^ z5uRQlBp(sBp=HDv4DMv$-%l`iq69hqFX0*Q;; zG0Fe-61A2f2nuf(j{mfeDH0?KDq6x_LL%7IPoSRB03LD4K$-iAV~-dT$4Jkw#ge&$ zegb@g-!IyV!K+CE?;n)WJ)4~Xg5Aj?8qc5SR{gZH2vQ%UT(zzd^NfjjtEpIQ z^?<_;zs7&StsAtsE%Ex759hIOW?6V z4?r!IjDjF0kXNxm6vgf*??VL00&|#o>~Fqdq5eV>uZ=3j3mh3jahvS<(Gx?T-4(N@ zxb36ism2x=b+e4fVkbEJ^MC}&Ea`vaJ_vT(dBPoLpHbgK2tRlv-z1@^uZn1s6*eJd zyR@q-W851pqI1IQIsPpMSxk|ioP)bt<&2#u_(WPy7(|B30@g|XyD>a-R@h9VOLEB%PC|6 zUuJwzD7V9RjAGs1$Y`NkwGo6a1Sx-A(;y4DtiNt=aey*a-zw%Y4^za$0clBpxTH2@-a9wAtkj6ZgS6?djJ>{z zAaHxu3a1#h5=Vu=NRFA!~Rx6C*P>q+KXU&%zQsnPzRu2mRHtQh!5-R zt;eVg!{JphU&jdeVN+ccTLX6158Y%3jy7{qpzLVE%_OQ3dE^A}LdI~7*oZHafCs9G z|H8{n*NZuv11uu&bB*PcWEWQ6_0H2sQhU-Rw>P)^=P&`XT`T{Y=Y)$&y0`XTqQ+Tw z6l_BuJ7>^oO`GJ%;Wyq>UF>>GbqWp#UeP;P9Ky0K^#!um)k~8hnuj_n51007B3q6Z z)rfF{pZA#%;nQ0BEM->nlJv{#*DYl=vmo)4St;$0?_@n%6I7rr$c5WTkbk0V>)CAC zQA=1N?SUx=9l~Oe#z1vH&J(HzW*xqK z_w7chC`pgGUTjc;4A9oHBa8Rt#iriU9;>>P!x8aPnuo1$^5LkQk_za=qGe{uc;9z# z>nTLY?%f2$Pq>_b<0Y)fBD+e2X@qvFckK@+R%PR}H4M0>ewinNka1!`F3Q?}bnd7e z{P4Y(aktjsR{cNC<0D5JU$AOW9w^FIy=2zw8*4KJ!>2z1ye9Jl(9h&E-ZUbI3!yEtj>D=;j8o08l{Fjf4<3!-R3LSzh;^eQzmp-9Fwxsrb zl7>H5zq~|&gcTwobUqZy)fTD#cfS~CDJEjS&!`xDMDYzKa(rTe0{J6he@d+HN9Xg& zWTt=K0ZljueB?@t)p?Y5<=`vtOB0tboO=1K?qrV`IIxon=81k3h30%3|FPr0HszH5eJJ>X{Y5_3gL5@09P+BuakV`{@Pc{ z9ESLrWiI{wE(jzE?k-VhE(kAsd~|;K?I1QaYAwYcn;KhEDsfs3Lpo;vaTaXHyAf%B z3B*8@^Zz~&*=p+8Sn5$Ywk+p=5^@P~_x6F?QnvqSo(dO^lgCmx`=G+nJp znFKh(g`mBoc0oh&K2p$n4KkC!&c3+hGIrX1>hV^B44&!#qv^Wiss6wJ!>*?U~c zY$-BB(xoVytVlu`+1m}Ngt|o~qq6tT=1QVuXRnL0#}y*P@7zA$-@iTdzVG+zb;k3o z=eeg?Djk@Sz5;?Gi(wVRh}d(P9l>5J5j2m3q>t-Uubd?zeAo-zL4EoNJJvWRE92Nt z=B3E_<*LQpya!OuX@JHLWzYKb%7+`sPY!C7wg>`{{|EV`q~9164s-M<+?OOzb$K$S zL`mQIa42r`8`lOBW99KQ+K(-F!O-M0*K;(~-lFm=3Po*;eJ=6fF0fR|yW|d7=JPX0 z8(8*|BQSh5h_Ei7M_2e3SNz0_Ilq?jl@9PI>V#pr4f?gBCW34rRAI&=D#ZFrd$q}i zR0i($oFi!lim4-8GRe({pMvXYp>vH7B23-Bf*x@&lCg;3&(16RIp`>N#6{Hhsh_SV zzbX;SJixi&uKV8KA3JLX zbff4qmQe~;7~D;}n`|d8Rqu2b4CdytR4>mlhdUUjZd(AV2vRpsNRva>wj`=4TCW^; z{&CH?G2^^+Y*9R!P(XeH86JvJPk8L_fp$2S7z^J5Q6p0QjcqkwT>Q~vA&OxY zmAH}rHt;i7k2!m!kr#Q9E)Ly__Wm*07HnJ3VcaZIoW5aP;7xdX@_{R%>JDchVQZ%_ zchW$JBRY5BD}q_Y-^b1OT@ z=oypoONQ-_XwM7wZ;^8&==mTRB${fnVXwJqPM>}((9>}wc|&!#Cyud}aq69BE~6Kg zi_cIc8t<6L#tFKuDObe{kn01#ZN^bp?sP25vOzKb~Y~OdnPFz96j;;!uuXJ&Ub??TxD+; zV~jpKiBf4BcU2(aWAbhyh%Z^(B;HT3u5(iomwj@1_~Rymbo*Z^2~oxAT6pkJqpOxp zkd}PWjFYunc}tNzZv`21Qe0p#PJx88Y^skHzngfH-UH1DvU|T?Hja7o&sLkcSZRkT 
ze2eL3yhdRi=YMjVaa|#wrvaOc#z+oKSDfc`H`5>RkS5&+O{{)_Lyzos<{ zxa@hR?;6Yl>v-{-p83f(m6zv0SX!a+S&b-wrE60^YWhuk@ISa-ag1tUT^lGLPeUhVG7@9zB5 za(rh#=T|e5DF%qhp%oVfTul0F@fE2b5J}h%&H+zd3arWoI3Qzh9M+_$zrS*wX@cUS zwBIR87`4c{gCI!QUlvhhz8Y#>xBcf}yD{~OT?|~g&e7lKrIs7Ac2`G5+|36n)VWa; zdKj5O;UG_Vdkc(J|E4_q(D@20B}5qAm>DG^^cvy;(}%ARBz=EakUVvKUC>~~#^E#LV3MEM7 zzaSk;%)>9D^DI%-J=JFYku00Df$vdIh%&hWqeVXX6)q6SSDiphc=LZW^vqpwlPtz2 zv^Y=$%ar&XL>R%$#X-jL-kE2QdVbDdOU%?^RXxk%-;hfuI^VHv%-$CEv?FJ+s({~R z<^HiuglszQEQ!>?Qf_7ZhOy{7?9-0iOb}mpEGey`@FX&iT@l-Jx}mveSS;r!_#%?M zWXrAL^E=Ng{Khnxu3%&OX*EZwwEZ}9>Py{`S;dGb#=L;z?5N?B>)YIjl#i0!2xU2- zVr`gDyf1jKWa50>FXb#~`MlmTIesmTyl}Blf*wNzXIzKgF2HGQIUIQ-Zq2gPX%`uV4@d!gsJI zY;-!;?cLT>t~?96);8LF3-#xFUX zgRfe!wE(s>vBc#?mP&Ikak1Ec(>1PV2paO{=u&nraNv`4{Mt*Duqp(!&Q znhiVC$EGiwKUkVODSNb7=t2g>Q(;**;^`3rI=Gw9Pd$Bf;=P3#q7`@V->+&?Ld>E}Fe1D^5+-Sw;BYYtmrwGpOjccXL z0+mk}S96W9-duZ}xZ~oi2Sy4ClEJ0viybZq_)0Nk7+{%ZD0_1?5FLX4*iYZ>7-=hg zf*h|xIJ$KJoiy#h_=x_IPT`}OVuCgDB6x~b%Bj@@ z%uPcsE7Odu(}=KP({Q>q^FW1Py`2)cYVO7Vne+3je;ugQ-Atcf%`Dl$=_v41Bh(zA zT$O(eF{ooGz=n-EsclqW@CbBcB=lzSx=(J6-2Dl@XC#QxP2Fk~9e9g#JjK(F^k90pA{r zocJi0l_>bb!lhCC5#uMGwv91wiYkxl+%2SDpUU=_VQ#?b_+TatQaLpR`}_<%PZFM| zEvrAkH||<#|60_H4kY^~x_%kv2uBwcA36$LJ9Cy3)$znRq^Zeb&)b-~Gn;IH@!E(C z$udUGw$$dJjvP@$5V*~RLFkVmn&sa>v)7Vl^hIIUmO%|1T8A}cQDoB@;|Ny&brfWA zZ~U4P+Ac~7(j$+(hKL5=XyJGSSLNw98|YwuDwaby0Man*gTg5t&~0lvxjRtF!6r05 za~OX6C$6&=7D-sZ?5ci2=yB)Sopj2YY?ee#QfgSg8BF#51+txU?#xC?a~K{U{^@E+ z5^z*byTJC{ZU#PBZhc(3{oZ$P#x7-P=z>ALWfG8T?~QuTSQ+mObA83s5BpxFTY=DI z{+{`b^$sw)+(rf##+x1yGF?p5ux@8`-8@12Q^ypj#$_iRL!X;xG0vng*62{b?~44m zQTFcmG&!AyfsW#D=tn7DQEc-p8GvMbL+F{OnG#wAg?f!%A28w@ua3PCRyOJV1sB=p z4eh>*5&b6-JC9%ra`q193Vb_Y-pQxX4@xcRwRu4z-YedZv&uB`7`dpjwPis0fX@%i zK(~YWXV7TQoSF*{`nHi^CH!>Ofk#FU5Bmxc^dEivuTtpsZ5E;V&mC~c9F866;vzh{ zJ!&rYZz7T!&h;O5c)#V$%axV?Zf%~hF}-I>cbVzYz%7jCprt26^yj{9D@-&P3En$e zDf#>e+oz{U#j^8kA1*mpF?#SEG+6|G^F|o~4jj-Gy9>_zG+plFt1^S?VGIUs&s|eo zD+3|d#qLjB)|v-ILEZ{qk&)i`%d z@5;)Sm8lV!>wq@am8aCtka%TWDNXLghXyFDg>!R)i*;B+42b=QO+!(9a$y5D^*8fN*EE=KD;J z2ss96Lc6gsr+3lCCTG8d>)B#pvwgL+?s0fJEcp3kHot?$eHL3GJ)J3MF^NLsqFp=BZ6s;9Ck=^i$CAFx{?|<~Y~i22C?IwmUk(vIs#PnS`?6`B6jfYw z7VYwUjQ0qrNG}5ni*nl&mTUiYlW!B6v2}W`nDUw@c+kk{UBSGx_Ut%w9jxj@dHZc_ z-zSCJ2n8CG&j>=QC;Ftwk%o0AzRf}7agmD*?oW9O#;4t^jyH&sT#hZ0|G74GHOXca zI8Zn+{HW#(cPxhPU$q?Pq9=Wx7g!DHnQ>{P%AFoQhX>v;TA4dAbFXh$qPiq_+*K`tDTy7u~$nxwEMEyRCH#Lk?NXAg-Icfi2 zdZy+H`Y#;5yt{P8{S|M)g}Mv1x31a z>2@>uPcB*MGNfr>B#{pWPg4x~k0?ADQ9t%oTy2mPa=b?&_MbmusUY@okZLMf3sM*y zAH>M)jQ7%fUZWKQ^t$YS<$1Cy#A^s}h~-zb+dW1R5}$1bcwm>kWbz~_kPS<1%_QtW zF76D@%l@)!_CvhBWBYTDZVWC+y|V{X4}<{r*P$^8*kFH|f-g*d!9y}eg1hWSW#HTX&H?*71p5-db79bGAmcldI5I`Q_| zH&94!yx?rv>k3YUm_4s?fh%Zb_Rsy34^c6!!&zUW9W9k||4*AKTGInZPRCv+2vc8#%sa4eZ z77|>vew3aX@71s*BtRhN)+otdWaTEo0~RQDf1c7W*coU!ka1ao9;fNlz0jbSX=l++ zT_l3}W(YZUM$~Ecv)PT;$O9c<4{1il2bY4MN@hsFT-%cWAOtqb*s6{!s8Wb@zy|*g z_z?im5a+LH70`!p7xiv%o~xY0>cXxwGB8pK9@0;m8vawt9fAA8r*72t3ak z)`Zecyn}V!h5Ip;TD_~#{VHcgom0J*v-p-GNM3y4-_sv}k9o`G?}csR+K|C)6nYY( zVjI^6S9TS|*c|77;yyqhnXiL+XPzR$rMiP9G7E1+xgTireFp*XQs=*zz1 zQ`=$Iy4$v437sIe=@-%Un?v zMHswW%}un%hfp^tiRce9BD&q6R1xN=@GWf4N0-N>|EFP~*0pcxPBpg*GV%(kAX6;! 
zY;yy5pwas`w(R@x4QTN=tRqE!2WkK0 zIbNcehYCRv6P`M9&eWJ?h;H0_+Jyb7KCkW1<8X7LYr8VGf8 zj`76~a$3nVJ$KFyGkt2m6cC+0SIcPN*_w(&1pTf`2Rw&(8*4@b-396+!VoTgT7aIQ z!>shRnog`B4-bz)VrHQ|K1K#7>ww-Y+b#4PWjato$1&Y2sGu9LAU*lE+zSW#rsyI4 zhw?S4?eC9~H;2Zx&4A)>Lim9I9N~!J0lV%6H0f0p2^}|MPK=y%Zq?SXAt+vo3D#!A zPBm(a$<#8ISi|sE2&N3o!pD}9`IYYHqVGUnO1S+M?+g0j;>*weYeFYScz>edLHON- zp%oF#8_PK&Lz=yd!yB|4;|=Jb-HvxK|Il7qURd@P=fFL=P-P{yaA#21`Hq&?2Hq_r zy@DxtUUTAL9F83t9MlKTU7h-6wrK!4it6(|&QE$G*Yf>9CiNc0pFr$5L@6)6&!@Ae zk5bzzu@Yd%isc-8EdFC*G~(T~6(Kw3Qb0ejlHZu1Qhh{vY({SQSmBa1UeVi#e>R}q zVz0O&p|aW~!WUFa5mO5ly%NE;nFYGA+k^VH!Th3TIso7N;>8=0k`4->>OTPtyj?{sr{E`-p)C;RjWDPceqt zPO>EIHIjX=Kk+%JlWW6@q}b*O>z{4LtaF{qmGxF%K}$a&jh(W)qi?}Wpk%<{PA*MN zSehfc{z`9TVfgp&;W!SG$l>q%F;XJ*zU1lgA;Lbim0hdBpZ@V{nbjg?j!x@ZxrE|C z>-ypG{EXp~2Aa5#0AQJ&9IAAi6fJE29#fGZaspmX;Wh@Eat@)c=^|DSIR_&xf^<7n zQAW??rkH$uG}}@`7INJX0ePf(PEyX1bM4{+dDjW)F4g9>fWg|Y`k}jh(0-WDZO_$e zeFQp5NIhP(YrLO}RSj_m$1!YJ{Y?f8Q{I97`bA@_J2P0f{p_nX{=a$cLyXz5E@{Je za>rdaURlTQ@5rks zMlPksZ_eQsRT~JQ&o>S2^dD!@+3{^cXf^8Q-+sO3^D6^^!a$7|bLZ^e%_LlcZs|~{ zW$av7GyZ%0R{h1^ySXWpWFg<*f2@KuZgA5BUc~;i6{i@H{^FLZr9w`Be}C8Ak@AJq zha&EGp;_;S>)0ub+sn%JC~?K>qF2Vs9Q74dR95~vJdcrs?d5BV&7Lqb(RC72{%huDtmIRYq9Y?AyP&JN zwYHp-fnG0M<;uO#n*c*4`!FW@YxaX{?RZMXKq0sNj&whaKkM_)|2pel{#$26<}toO z=jSl*I@d#g%HEDX*UH9T=L&04*{A&%!F5B6$2fCQX09v24FIMG$L2#_K0gla77{!a zJGf6CX2UaG88|+qFz~rLw`Qci#BH(x-14u1s~zPQG8q!vb|N3ld(Xhf!uS+(zOJw$ z*5mzC0Cc`^SD8G}5cKs_fUwARfH7wqBElX}1QIXd!NZ6U@V}SD*Eqg(o~+8mN7uQU zu_q7EX5}wn_6elm-+D>7~+fkSNp3rGb$VWb>tf+N$%jX+OS%;!mrP<_S@VY$=ll zP>8cCs3!B=i66go02H!ml^7&s?3kv8f~WRRsXg$+CqGE{)v0OoVBCz2zN_$BlXhOH z5;{T~a~ZR)9VsvY@L%}CYXX>N3Z_CNT5eJ%@pYYrwJ)|m=@21Pc&}uI<(&AtQ0)zx ziIGnjclDh*bX=R5S*}?MIBXmksT@A-q}%@Y=-|C`!1Yqe^EpXDZ{<;#_SGNL*Sgc% zN<5gHGz@w((?67d`XSF=eT-J1j_){i?2M0)ZU0rt-$HF#Ex(y%*^k`~(D!^(Kj{1S zU})G$F&T5~=;;lsz%`(%FpHyRy8TAJBPDrlH>4r8OFp9eak3Uuu1TH`(nV!rZ7_Q5 zTfb$4mLNM8s|G=KUWw+sdWON5ZE|C5YO^cB+FrbP7DqPBAU{S(G{DVHeKo}eL?2gK zq;~8%*WK+|D!s?&?z_wva5cy1SFBnx<^B$g97$7bbI5z1xy~2?gAJP<7gu3x@wGKd z*#x;OGURl&e13Mz%oc1+aZa+_A+qR&7XkN>COE2BZ!#9ChW-&`+5~ErL#d0clrah_nxUX}GZmz1AyUqEB zDEOdMj4CWD^0X((f(C!Sv}ri%!z}a4g{NcS<+$>S&t5aWpu}@tJzl59JDio!ufSUK z^1r7F3J5(<|M?@=6eo9ej3^Ny+5LPYP~LUyRpsc4(uT4ck5aV2i_9F+ck$$n6v|Sh zr@pC9TtWRE?${W_%iBeDhx5@C%%qS94FZdh%J~6aW#!CjzRc5@3phC@(|X)@&0e{! 
zj5+$MVM8v>&D$(oLhYxN0%)_XiwtGAox`gWzR+Q_tU~_$R5|Awf~bC-H+*ZpS%e4& ze>uS+N)r{leOT}^=E;*P*Q+!;?8@f{r2hTDN0DN5GOl(Cp0DBlHD*_%s7URAb|-E~ zc=Kv(1V{48jm9T$s_Q>?Sa$e4RUL0*5DpTkQu1L`$u^R#c4fwIgk=?2)eazUw#-eP z?D!e-mR%_V&Hh@-8nY|1tN*F8I9y+)AcD1HyI}dA0Ht~?UEdU$UD)%yQ;WFzpbVqZ4 zA==5d>WksAxhF9(Tg#93d}{wLD0eaa@Q>B^NCC$GHL3w-;s*Y-$Xxfws@HkX`P22I z@o;_TYsXcer2f&!8C+@*L=ngd7}(;v)qM4ign&R_^Ei?(nqLF=_#0&`8uNGxgLhwM z){S=!9Mt5!~GiJ9kmKZ@p z`!-05C>R}@)dG{h=LS*4ns!()zEBcAU%t=S|7G_BiXPNJ4HYB?H(*p?QpPMD z=0W%ZAK}enZfOmWZ0r%#4ZL&;y~J{Sl~V*(lcZVY18Apuy!6Jm^IyMyT@=C0___04 zRPNwXY=jB-CDNOy07YgT0|;O|}-XKUws>iBktUOBM{ zW~Il|ioAaPz$MgJ<2lz^)XJC^rF3w5@m8Upk2WkYRg(1&2&mbDvh2Q4w_e}(`d9bo z3whDDNZ`(FFA2Y`=R@uy%YhYJkh5ISF<6^lSg-Dtp6i{_A~1h^se#%ieNY*%nb%nw z8ye*^_ZX8C-&89OSML7p90SQXsmMDZ`81&3RlXQ@7#3Xzo6u)R-IKITF>vQxcf-^J z-hInUy%JSl-o6W;pEab*dZJ2%pzTeLC#g#m&qdUDRzqRg0Rjk1eiF354eqm zoDxT9@`q1Z>gx154bwg_3**0MwM-!Z9nfrK)--?W^9k%bHckGeEgf@4@#E8}~A zQ0GEEl#=i2@>0hbv@NFhJM+^D`|0GIx0IhpJCeEKYq8O7dbS+{RLb?Dn5-#Uu+mXl zwp-67yEirhG*NqDZmn=gp6K#eCF-5(mC?%UDA}X(p8aP|@K(dmW-@`p^>ZhL}Bg2VO$ zW(%;@?{fQd)t2s4Yb&YV1r!k?^tg5Mh<627k|x<;hSP2a$*jQf1!DUvsCL%ye?azx zNzd0VMoD9`99Yb70opcRdK~JBnJJrbZVy%eC_9?}7RK*gq`Q6d<|kKMb;t6VQ0f$J zrH29^$&2;Qo)}k#_A=W92c{E3$zAp7CVA#4lfk$?s1pgB^MTxa(DXX`=n}j-zdL4; zzk>Px&6#{Tlb6tFOcp`5 zigcUYsZ#vg6t)UJ4;_9rh82Bgd>17Zuk%7jj42eQ_@?^0`{~MKmVNU#w~?nyH1jjT zh{Q!OH*mwxOI|S?WKbNY3lrqy=T{N;VfH^}&E(01{NVF6J;U7?%tTtm`ARxJxvd__v)Hd#S-dYvIJBIO!I&$AOPw?l_6^F4fCgj;(kAQ&PwsxC?U$wd! z26WkiVp;0us7a%O4Hq)q;Z!jWLj(>*4Mi z;RJVegy2%*dE3=xAispy%t{~T5slq~m#(MsDYb=_z0nqrV0NXB{oGkHGP3e}PZM=` z9IUrxz4f-Mt#<~VH_S0EHr68INh|2@?<)jwIyfUQxPADAxVLeS@fh4#s>ATOe@f+s za%+~EeuhB_D!))Jg6Jt6F$YU}JGS5Xja?f2;>sjUZz->jnsN(sYXN0n@;A4`D>%78*S!8jVvy-$5NO-gz#Qw4Ryo}GopT9EY>m{fU#EaE@ zI6Ch7`MuIJe4nmNR_xNW-to|!0k^k=n@JfPs?@Q^ji*N+BonudPzw;To zrbV5wEj-;nS&rhq>O;PCQ$~p7R|Bw01RPo-p=_a1iI}O0s+^|M5@ujvc!o~N=%HD{ z3X)c`Cl0Ao8F5c)Z@t|stXa%`JtS<061I%bT;Q~y>uxrdWRJ!xn)44aDj)ZGO7?Rv zUT=0z9mmo3(iPK-FXdvh;-Am^Q^#o&v<|4}P(H%l6h)RgtpZa_v5_XG>_hZHj>|&G z+?Zr2EzP^8Fn=7>METBiX$lWBNPbbe)2=LG5?%)SHxspNlb2uPUBFNL{sAxk6dub1j|GaWZs&>v)d=Fskol%N zpv7&<Ix z7g!h??0YPOFji+ccw_%`AMz%pNzvd6)xeidvI%-+c^_t{F#dEBK=k4Pamk^!1kxa} z-C{G2H(9%ByziL?Mc^Pj824F-{X^z*^0nAA?dR|geyx+vWk<}cxRDbKV|UIn0qj+B zmj>*Tqy9tM|Jai+S%=-mdgN_D{quDqgz4c&&%;rSD+ivNRovIw=uF#Vh zoMUg}p?{{O$4~SH1RrYhyat~LjZrK6Mum)IzS!9UI=sT0+kboYT z6H)TLb0?oi>C-(IIi6OCJLZ(T)*dBmdFJ&y8%f3woiyTOw}o+$Ibbd8SK~5K2ee5} z7pB}5_tRgMIIb)l5lSOG_9e_$i!**4pV;h+;DybdU2*&uc+PLzgXiJ+1X8rfFh^D~ zB)l7M|1K?n^#rTZC!J&QI%^aSkXKM`396gXFH6$R%9~SbVLT;N2gkTQeNh%RVVmdm zkML^jm@G>tzFDH`z>%ww?c;$S#C`chZ&=9@czTclV38Jr^@g20KWA-=>2<2Q2l&NZ7JdLF@9{p6EqHerXmz4tMIBzIr0jNx&(`e(+F}}a z>CLaC>-R;r+C?3^{~wqL7K?B{1{&Yt9X?BBBbdU$q_QYIlsgaN-%DN%dFJ{lX+_>X zgGPV3zvDj-)|o#R_%+(xFwQfE_udX)x(WBxzfm`&zMBL(a8Gm_EgVq^4*$>4Ik`%? 
zDI$ueq{ogI;8sh?*Ph177WI;AZ-ajO4qLbG13g$^d1vGX7 zKzr>}A)+(*Qe>9=6rqON{{)94$ggx+kn|DM#Yc8vPnCR`bS?kwmd^LpV1Pi(N12QH z{Q!RVhLKH3MQJ(b^|Wf(~kPtt|emxB&W{Z)x~k( z+4De0XhR%}Uddev7E$ri>)7_j{>16Zo&(w<)%dwTSWv=72u`B$i7R}z;>7!KS})+V zcF%HP1w|r?nRciSp0Fi=Y--Z(E4cT-kNB4~UkbiM)%NeGZm6r{0yNJ=5PW~rmhvjZ z%3bNw>nmTY9qW?|CRG`Dl%i%|qG@1+oa4TCDTSXf@c~fAg=jCI2nJ~A&0n*-m^WIY z$@XP)(+hOwN)*Y7N6(rT4a&d)@VHA5>*0E2B{Slu$Rscp6gD+rQ_`I*bo;))Y-683 zR++uH=`VhQ#Kdziw08X9UdkNPr_T3({u6>I4X;vGq9Q>I7@7HF`7^tQXr$-I0$t?0 z#wMu8E;}w~&_ZwoS%RRK-FN25-J18X8S(cXb}Aa+$`OS|Vmb0J=DpCEznPy&d|A*+ zaUF-Yd6=x3bxXc@)}4dsLkP5^UN{XZt&BQR1Y!#)qU!VEMKicx!Lm z!}5JTY8gy4K?g)D{7w3zX}A%^&P90RTRBFFWVt2Hw_TkQPeFMbR*biu+UD^3cm&7- zVKKviO(UGqp_tFB5M~V!14LBGn>iz&Os8EtTdQTE#OsNf-<{8lteh3ziO)Rp72idc z3d=kB zpCBbfFk7=~_^kjJuz;Y_ z;xuf)#rn@ZGr>}~p`BQQ7824YY+3vw{a>2O(YUVySH|1rq+npTa+}`C)zIyG0sAB* z<8_vgNhZ(|FWPzm03nh~MsvCD6mIpfReXIFuvVqQWA>q9EY>g|cLZI^<4+ z%=HgDjvKGR=pnS%ZNE!IGB=DnS2XZwD6o>mO)@^a-?@-i?mBsp8c_-)PX7^qX6w&Z z!7q@Ckhz~{h*>0aN(@*u{=4jf7xM1^>wub)5{^)g2-EivtPPNh6DK#dGwpg^dc>@I zxUJ&I4jUw!H<&+OU!js!9b`z<=Jw+!+5r6Mg6Bq6i@z^K;$g`OI>*!u@`4kD_SoIw zH0(PtH0_yJ-o%oHBF8wCqV;4jtczPU+enuW6gy;u&^tf`l=G}?6Cz(qpAG3ts3~56 z^h?4N^7V88*Jn?X*D+MtdU<_kYU?GLX(dFK{iR;=KfN-TUJs{49C~0>RDqN0g%Y+F z#2D|r{OW}Z$c$F^4!51L5ZRNY1fRH$f`QuNg#1L81c|z<5H{mJZE<^k{ zq#i)B5jDx3-mJKEO5)~gG^RDfNe8v6txY^el?~2#4;Ztct+0y>r09v>-J3TfApf-; zev6mN2T|z?l)Jmo7FP1KLIHG#AS+FN{1WO$D2y3a7)jQjxYDPtq&#T2xTOfmOrKhQ zrN@uO$Wo4IRNaGpm!!>hU{PD-Ei)dtGbD0oYc3&RVV3pM&5hPZV=XJTP3AR;&lv zB>iQE+!a!ECCZ;9k417?n>DR18P8n2!q!Z`%Ff;g^le=m%IzC` zsdj3R*0^A*zbvg989*9TeKGeZq2rBws^A}`3`f4h?F2T$c9;0-Mu&12|3cOI)>`Z& zE_1=r-mzz)f5}@i9;-B$9m!24iySBd3)x-RVxG$v9a;+hDSB3~63sq&dfrRNIUfBe z29uZnG0cZVMdff53Oi(L6iY1s0PKL}SA09xhD5}5#6D_)g-m{($_hamo-MF%;uMY` z#+c-7Ijm_eGeE{PHgX_HfNYs6#1Hl~%F60@d32L2Xw2!bV0y{TK@p|{m9NI}+rgRw zz7`>0@*^5MuxY2^@$oX1cSe%e{`myiGlVO6DS5YvAlKy+DtM3XUqX1+BrkcQBVhdn z??HwJ``ZHg6caQ-Zp%U{cs*OP^7(v0G=}bnq!I4%+B2`S2^k@0x+BBxj((^JOU84; zfBQ(!z;La=B$?-JH9c|^yADUl)XHe+&Ve|i>6o9!ArawDfLYX3;L&YhCRc3<0mK95 z5;P8nm_xeR0NwwFeUyX(s3j30kdlzDo7w$7{;cI`vt%iDkiKp70#kA1PSySjGL$fA6NuRYGbk5;Blr15;^D(Vv(#=Cg z0aOqytg1Jm1FsKiusSw$gBDh$sAr*+i>X`1oFpv0bqty!nJ>;iiNgKQOxx+=en8&V zm3$Hk7j%GDa!m3{?6Y6#;F%DEcP8roD1kh1DPbL0{J0~S5BRFMYjXoH7MP?NtZ(UU zDAEnl><^OEon7fN7BEQ|@?SoP1Ck18cFV*}&7I)-#X3&Ee*f(=#gNP5+gSlUxRj-< zrvjbVKnYzpr@tYAJmjtl+L-O=dd#1BewiX+dsikLqC9(`k$y>*3S|enxxaYD7|*au zBrgC|c|N7f>ZBi2`Um}6-rq;F+i*7)nI?;ln2d{-py=@#FRcR$(I14fkjWA)DohM)*`x2JEePGW2V}vzDx($OB z>9dQH2%{Glps%SdM%77^a@n_pBdCZAL+0h~kc7_bR}RYkgT|+Y`I2*5pc2~teN9J) z2}V*CGQz0NOhX}-Vn9CwZu_6z?ogz>n)@>{Mtx&}ubdT4U z*S_APtUJt|^P+3&RbIp&?~|ZVlkjoX4fNnK8`^r`@jk)?74-HrY)IEnT9p7R}>O8vnN1Y+S?S^iNaEcNKMxpII6#D$?o{ElAkIn*94Fh)oTeylcOtlM%CFPrdxf zS&}FQ0x^ytaLHD{x%p)nkk+ZuH7Uj`5_t$;0fHwBDt(hIIpSAbvCfQt3^d%Gny5x_ ze&igZfqFPJ!290sMVC7AuA@Q(iO=CT1!I%6Ef|<=NeO}u_6&dtz!<1CJ=>leAd=jZ zoj58eF!^muAYH&n_N5HPlGlTOP%(fF=*Gjr zrELX_`?4hKkSnuRG48yE#nGwk=i7 zvW6tLykz8zkHiO_y*sjbf9QtEt7Lrm3d$$DunKA+qbt8BfB1(8`EbZs=NOfWnq@sl z#F76-G**TD-Q5s@sG(QoKQKI4g?j<%Et26=PSg`mn%&-lQc0zM$|R)L6EZCdEKmar zMp!}e5MO|I#~<_GqCCgwS3FMr?Gt0q8fXoY_?!8o_=fVJ!c7T>5*6Bx_%rwu&xzH8}}D#+S)0aCEegQMeAc1q54= z!74chIZx-#!M2EZ<32pOvqn@6)H^m%8*8K(D+Z4HhQ&mB;TP*lcb=+*&Ac_$DjQZV zztQ~QZic}^k5_}~cYDcrS_FvPM)HNMdyK{0ew({nOOqa8G(uH;-Z~@*l3oe2>;Ka> zTTW}gDduF4N4-LR6v>PV$sZkHgF5tjS0WDil%f*~M_DXo1CJVRNRsJOA0UQKoIKg% z_MCMYbZ;ACyIdOKpoV_uVBqwr1^?DN#F9_RLuJimSqq2l*veS;4J-h^SK;%gq_9RC zBl#fkh5HHta18SD6V#^IP7Px!oYxG7%T5UFL0v5+&3#Tnhv|0v=v?01}) zYlbwki<+l|?=h#cgyh2^<*xIE<+mlw`IMK!w8_>G6Ux97xzpyK083)RSM)&rjDhPy 
zQ#b;M4-O?jfRf>@uCni_6T(3Y4xhmfZZ~W6fmz@PYaX--tvAJ7>J`~)7CDdGeeIjh z5dp@Rwl($mM=1X@*00=k+dD>DbEoTiZA+gju1F96d{gcGml9G~~xX}HX zC=o{s;3gy4K(f$7F((Wu2)0v#-Vh!>QNQzdOBx0s`+6e6-38J|6Tx49mhv}QJiI}O z$M3<)ih$rxX>Y2-S9}1^)y&wx^A!}NTggcixTRgpag0)cy6%WyA24&KiO!unLb?U+ zBq33Z?zHnY+~W>C0=dU?hn9>TyT)Urqix&R|LdO;n%Eg#5cfwG1`dn> z?er$rS?!9x>S@UwK!m4Q$F%QsY@LNmS)Bkw{L#hNWCC)8eQg?|a6{Tk{)`Q~Mxp-h#fH0{AU1eds;-saB% z-lyqjeaJHiVCV2QH_d&J-DSQJxoQ6*h+xwm4SKSrutSxzXU{@gk4X&wZ5CzavqQ1n z=Pn$@SfEWGg2X~ar(4-4C@AQd&qHfcCWUY4jl-}waIUL$c?JdSk#PwKWEw2=V;U{Q zXxtfzYGIRafb2KqhjOn*xx1)Z4V*0fkYA8=d842TjL_3HS!B}^FB8J?ysbc29i8O( zPWgoLkigrr5YR(ho+jv_Bq`m4fkpu;B?eyxwUbR~m1&1) z>Vn~fot|w-M?@J@jn?BYll)cm*cXsesTrrikL%V1_AN6-`>*8`unJd!OJfKXqzEb( zCwC>M-&vftsYK~TFzsI>AfCLKGAHxjVi$nN^#8u(E&k}XFe^ct;EflIiHWKAgPeKW@fxkp%y>s|VF~|W?{x+!aA%Ct zB%vR5(JJH9N>D+Wu29BN{8w%F2Ot9hq<}`xLekiKf{{B)fD?iOV{YZ;&+N*`#ln;5fkDG4gma8B{5>K8yF+&Y#d=Hx=52d&)#}m_f zq4PB7phEb9sx&!g=lULY5~+6#K)q{Mb%uuPA&$Bv$C61jhlCok}aPb(&T(n2~ zyq(!6GG;1ap)X~A>Yp=z2dRTlP&z3e0hywJ`c=Q%GDb9h((Y%7weWjgkCLdt?Rojm zke$L6S3eo_@GwYg~n(=XG*O$oiSMoZ5_3rBap~l z98&H6!zB>Iurk(T zFa}pnQMilF84Bu=IHs*p^hz=L`JBa3lH$->KAZc~N)*Uw0Ps~WZbsK%zqaOxy_ME8 z)O}m-fyi7BVcoj%J`g@KAsRtP#Mg^2POhie${56V3YYpK2SGzx6ffKoClbCAx2p>~G$r@yHp&LDrHCE*T*PEIibeq`@5RE@3-gwL;Vj$8&+fkAtX&wSq&HU^%s8 zrB>zID&jtM<;s=uLD$Mj)czzO(EXZi3dnd4UHi^k6V+yYB}-~Y9L&;UUXpOj-)HLm zr1mSB*T*Mt=$uPBRjTGJq}>m}_%5XUw62#8B##5{S8z$V%Mz&wMZ7Ve+j(L>JG;(5 zFy5~)lHOh$F3W;;E4||p#GfX4-eSZI_{CEYV9dO$;5tKijbB4H)_=8XD&(aaWO>UE zjagf{_m&YjG)BDSUPUR?i}!UBGuNma)O@%Kp6Fy~Bxdrd1iX-Lw`;w&&Op{hsfyTl z022Iru%6Y%I?`rvZLf*VU9`~=FiBLjY|T8W%i&zS&d))t_~*{_pQbrR$6mI>BQ`&I zu`I9|5Lmow?%SLKPJZN0J1hqhub3Z7sXkKdP2~Cl6kM^co|l z4tW5@6b2IE-eq0`Qj{zdM*<9;|ISBKFCu`+;=Lj@|5d6;nRXvo)j}DR|Gh$`)X=Jc zj0UA2eK$fCX!SyJdu~h5)qG+CHf-xgfwO1Mc-x?+8dWtfCc<>76p)e z@f?2}sWZQ9Q-|6yB>2KYiPVa3he2vj!WRT=kCLpU@u~%!MI>{|eJxvt%nb;|B}wO? 
ziEj~(SXyrh5b~(G+09oBeTj%RZ8wp>D*$~W>}+W^Gc){+x|8;C-> z=KK$GK=gkBsfNyneV2iWrXnSue^-MI!r55&NeTjgB;Us?SbTY+XD{8#BnM-3o; z4hd6rz`E_VbVJndlt!qqJ%YD>EDU)F6I+|Ko}Hh-Gwb@}@*=~(%5e8<_(j(!=EAPh zWqE87bjI`><``v@vX)E7_uxuXVb90vwBg1VtUW}9-WKs-M0oRYMwVyr z&r5hNI4Eaxw7{osR5olC*Z>iiDa816kfgqBVFeihxhqZ;7B|f~lwt!kX_F|GnRf0H zrr{kJihaS*F(pl}c~0YPYmm|S;0$n89(+))5!v zKsnkzTcKBL}qR;4|^W+ zg$66_Oz~U*R%2(DsxCMzQCNoRr=Q^9qLa~EL_Wc;dcMX~*K3f$-?jcCoe5Hau8xNb zsS1078tAJ0V>|WL%)%E?1ezllMI;9&I;ZMr*j=yBGCJXC>eU#2yQe^2WjqY;rAk)E z?x#c?YkS6~o?~Qp`}RZW_S+Q}9N`sL#Rx3I5o^c^x*+eF&oOJ12X|nwsBqPWO~ZY$ z*|tU|f`o%)8UdI8$T%Ln;0FBVXMW!MvxUJaOZpuBKla}IAL=*iAD@LG`;sNQX{=dW z>^oryHQDzlvdc2|WvFaL##Y%%D2X9N)~UQx*|P6UDnghDWB*>${ki{#?+^FmQT^bd z*Xw#+*SXH}JkN7ZRW+u<0AO+>-UHqZz|aI#KrLvT;l2>9Tdn6P)Jb-JMrP&XXEngY z8_&-7383s|p&ld`86^JA#H3ET&mg14K z{gdoIE}lg=Kh6Rg#}ciaek6Q;c2NZa8fc1=Qh{4_1b8ZHt8-7^oYX=_1tUyrt=_-G}3kPTIV6 z6gRuNp|LZoEo%s~{B`nnG1~L=*v`PaAYRJ%eE;SpAMm|5W}-KHt2{MV?22LDUXQit zcvA||w}4Yth7mQpO33+&DJ8NMCXjcDswyKvU!!R$-C$PS9QVI3&iDhqcqtn+?we#9 zr*mY8mI@N!(%ESzt$nPnr$151BFsoklmO%fBBvx!%#r_G(Tv{=*W$r2 zBsH4f45F?4HEc4{IbTxruX87g$r5M&$5wwTEFx^3DYrCh8X=HE&8eQ}dAL8>Y zRKzte!kVuxSesM>Ac|Q&5@i74e|79DsqSlp%xCcSA6(rnuuHKutxW+j>Bk0z9g) z)`!rx?Mx+n`7BM(pn5ADn1ld44OA#(*O+FcTD!Arl49^|$ zkl%t!_Svl%Lfz@6-G%%Nt^JAqBjel~8DLbK7g7x*vZeC6L7*s+YJ9jZLhb@$aSS3c z{BZaGd(GSJNgr0YFpKDc3`ny?fUw}ZP}1GIt;)8`E^{<2|FJxs2bZCl^&uEcHWAaR zZcBY8$ZwYb5{UG~9CE8d3D7zRPf=Y8eCdq_K>c?oF5-$oI3@52X@p?~?*|-ifO(x3 zE<4JER)L0ReV}-w0$4JdS$4gL5dV}yqc)%kUXDlu6#B{l=J;)KUmFo$dX9>5gBJMy zW7SS}^xs%Wo_h3gCxC+sXuRrcwNw_0_3B1uWbuD4g`=RZT${Ek2exhS7zvH|ojtEiF!*2%E(d67 zQ#c2p?ADjDbYtEZ)>(2RyEMbg9-s;pyL3$lrJH}1r1jCs!Gt)~)2wH7)$(nU+yQIp z7!mCk89B-MUek4FZRQ+6wD)m&ci3hDueCcq^0(u%AE1nsONr6|%j8&Zd8vy`&d7ZN zj%*q@ANzXr#Wb^EhEy8#Ot?LM?eSAbxnf?f)n;AkSO1U>&mQ1oR8=bjeW*ra2PRb;CENH-32h1vD8Z;95(+!+RK(+88a=I^K)=h`{9y|l~m>wt74LW(S zy;4Wl0#3YeddM&bK!d5i1qDQfJa^e*!7xH*;$`YmXVzSa2{o3N=>Dgyg+LX>%1V}d zkPm=38{@`HX#9l+YN0*O&27FB08|gl!Bq7-wdX40Leyg*U{K+eles62=)Z}iCyt&1 zX5>_0zbP1B;^Nyl#go_l(}h-uM?G)v!)MxWVBAB?jCj9h%bAP+i>`!H4=r?8TngQ_ z3WgYw!FZ+n#EYQ&voadbi1P;PlLv`Q=^Q}Xl=ygYN>HY%ISOP^yIp}0v{MDlgZ>-B zHSBTu+=3sDJbm#&=CNjK$*sG2HaXDu zz3yp&l%3lVCIV8`a51>anN)CK4$u-om^AySe90%?sI#x^-9Vc@uTZOz?cgb|?@xu$ z_S`~NStxW{ALsj+n^eymvLC+{QFP4C&3Y2us6Q@u&Fgaz$y|W0iVM`y3R0H~odtwB zT5^Arp(V|X+_yFNyqbDc(yx;VVrWmmaRmCn?s55T(o^fD5U}JS*qZ-&zJM>Ue;)(N z0%y4YZ$~~~ug68>UztT8EWVvv8eF;)*1lQD{iA*!tQroaeVpzh+*- zT8vt6<)m!SJB?*kY7N4O2S7$$#0^nf<-d=T(yXA%O- zFscKqSEX}|IXYgS4@@T+8(jn=HXOQI668TjPJqIMEw6myF(={syaq9@`HwZx_W%G0 zKqh|6@hO0XcEwNOT$n^#IViAL7Rx}1jC8Gt;4$_=K1f&1ukuge#8_Y;$;~MG8@6^& z0IgOvaq$}C2R@Oo`D-rV_^F(()Kc}cmV@&^QYDH(`*+rH##fA@j%LplWVY`4Uu$Zq z@VR0e^CdG~`gYbDL#zY+HCeLqcv-cOdt(iv6H28gzYg57!&~PSdgJTwfr&p3phL@B z{w4;>5!-vC+VLks!-B#yvJ6wQue5@A8U3UmfVhOdi8dUt3hxt-K4pUF+`%?xSxIdS zZ4B-uHI?S%O!FeChTqm+2z+Cb9(^NBn_n-S)Bo2?iyk?qDaBVpA@Z{;&l3O&M71r> zBv#xyU97MQuX0Ykh740B>vTHxG2Ck!ri060KuHMRy7R}6>sqR+5%OeR5_n>VsV7s# zu(J7>Vgag1`Rcc6P^-md30zn;ATB)Q717(mup@5uv5};OGfWNw*19vtph@WPk~#2K zsNcM70+t7iA80H;K$SyitLklXqUh~5+xW}HE8KU_sIE7Q6FUMQkG%7gCfii2_ydE6VYU@HmJDn{v*{xr)>e+{q31(n+?lXnQ5+0}*tkq}kQ5s~4W1H#_kT zPB8uPjiGQ9Y=j~y{XW0)8l)P?5A@(Xl0}V~<2)>`$x7lX{DoZ=#a?QU_g(!uT z6;bztUga-!GW-~bqr%|sIthq5`HtNJbi(?*gRB$kU@s(W=NvJi?Nz)gIhAaIwZSo}B zdwW-5Yz3N8X>8_0T}$#RY7{L*T( z(~|%?S8Wj1_3b#Ci*TjzZN1|`nCd?42x9@qw7B`6Zf(UN$VV0 zKIr~C;d7fUVdd`ZcN%uIz2QF_JwYoftKG5^Y6w2+9BCGuO)Q`~X!IAeI5Hp5b0?O) zSXgFwObDO8VDl}!`q;S=`C*H&2jVziXjs5_%{KxrM)Y=r*Wk;?*wlGE3 zbovwi=8o@?y{ne&`qot!5Yn6rNqr{_^#_h46&t8IXS;Vi(Xy`awr}t2_qj(_d48Wk 
[... base85-encoded GIT binary patch payload (image data) omitted ...]

literal 0
HcmV?d00001

diff --git a/website/static/img/staticvfx.png b/website/static/img/staticvfx.png
new file mode 100644
index 0000000000000000000000000000000000000000..41efd7f12005db1c67e168d1447eaa0596603e1b
GIT binary patch
literal 12912

[... base85-encoded GIT binary patch payload (image data) omitted ...]

Date: Wed, 22 Jun 2022 17:11:34 +0200
Subject: [PATCH 166/785] fix lucan logo name

---
 ...o_On_White-HR.png => lucan_Logo_On_White-HR.png} | Bin
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename website/static/img/{Logo_On_White-HR.png => lucan_Logo_On_White-HR.png} (100%)

diff --git a/website/static/img/Logo_On_White-HR.png b/website/static/img/lucan_Logo_On_White-HR.png
similarity index 100%
rename from website/static/img/Logo_On_White-HR.png
rename to website/static/img/lucan_Logo_On_White-HR.png

From 5b037244fe4e43c7a738e9fe2ace5c6f1883e72e Mon Sep 17 00:00:00 2001
From: Allan Ihsan
Date: Thu, 23 Jun 2022 11:36:22 +0300
Subject: [PATCH 167/785] Append capture schema.
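The keys added by this schema correspond to Maya Viewport 2.0 render settings (SSAO, hardware fog, motion blur, anti-aliasing) that live on the hardwareRenderingGlobals node. As a rough, non-authoritative sketch of the data this section produces for the playblast capture preset (key names come from the schema below; the wrapping dict and the values, taken from defaults set later in this series, are assumptions):

    # Sketch only: expected shape of the Viewport 2.0 part of the preset.
    viewport2_settings = {
        "ssaoEnable": True,
        "ssaoAmount": 1,
        "ssaoRadius": 16,
        "ssaoFilterRadius": 16,
        "ssaoSamples": 16,
        "motionBlurEnable": True,
        "motionBlurSampleCount": 8,
        "motionBlurShutterOpenFraction": 0.2,
        "lineAAEnable": True,
    }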
--- .../schemas/schema_maya_capture.json | 71 ++++++++++++++++++- 1 file changed, 70 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index ace9fc22da..f8dba0be4b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -238,14 +238,83 @@ "key": "ssaoEnable", "label": "Screen Space Ambient Occlusion" }, + { + "type": "number", + "key": "ssaoAmount", + "label": "SSAO Amount" + }, + { + "type": "number", + "key": "ssaoRadius", + "label": "SSAO Radius" + }, + { + "type": "number", + "key": "ssaoFilterRadius", + "label": "SSAO Filter Radius" + }, + { + "type": "number", + "key": "ssaoSamples", + "label": "SSAO Samples", + "decimal": 0, + "minimum": 8, + "maximum": 32 + }, { "type": "splitter" }, { "type": "boolean", - "key": "fogging", + "key": "hwFogEnable", "label": "Enable Hardware Fog" }, + { + "type": "enum", + "key": "hwFogFalloff", + "label": "Hardware Falloff", + "enum_items": [ + { "0": "Linear"}, + { "1": "Exponential"}, + { "2": "Exponential Squared"} + ] + }, + { + "type": "number", + "key": "hwFogStart", + "label": "Fog Start" + }, + { + "type": "number", + "key": "hwFogEnd", + "label": "Fog End" + }, + { + "type": "number", + "key": "hwFogAlpha", + "label": "Enable Fog Alpha" + }, + { + "type": "splitter" + }, + { + "type": "boolean", + "key": "motionBlurEnable", + "label": "Enable Motion Blur" + }, + { + "type": "number", + "key": "motionBlurSampleCount", + "label": "Motion Blur Sample Count", + "decimal": 0, + "minimum": 8, + "maximum": 32 + }, + { + "type": "number", + "key": "motionBlurShutterOpenFraction", + "label": "Shutter Open Fraction" + }, { "type": "splitter" }, From 4a521ec081e22b226655f49a41a38dbe31ef8f49 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 11:39:23 +0300 Subject: [PATCH 168/785] Adjust --- .../settings/defaults/project_settings/maya.json | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8494989556..437adbc1f0 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -501,7 +501,18 @@ "textures": true, "twoSidedLighting": true, "ssaoEnable": true, - "fogging": true, + "ssaoAmount": 0, + "ssaoRadius": 0, + "ssaoFilterRadius": 0, + "ssaoSamples": 8, + "hwFogEnable": true, + "hwFogFalloff": "0", + "hwFogStart": 0, + "hwFogEnd": 0, + "hwFogAlpha": 0, + "motionBlurEnable": true, + "motionBlurSampleCount": 8, + "motionBlurShutterOpenFraction": 0, "cameras": false, "clipGhosts": false, "controlVertices": false, From ea6b098bb2f86d027815e8e685ff5932a85fcc73 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 11:40:01 +0300 Subject: [PATCH 169/785] Adjust `load_capture_preset()` to work with additional settings. 
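The new keys are collected into temp_options2, which ends up as the Viewport 2.0 options of the capture preset. A minimal sketch of how such a block is typically applied, assuming each key maps to an attribute of the same name on Maya's hardwareRenderingGlobals node (the helper itself is illustrative and not part of this change):

    # Illustrative helper, not part of the patch: push the collected
    # viewport 2.0 options onto hardwareRenderingGlobals before capturing.
    from maya import cmds

    def apply_viewport2_options(viewport2_options):
        for key, value in viewport2_options.items():
            attr = "hardwareRenderingGlobals.{}".format(key)
            if cmds.objExists(attr):
                cmds.setAttr(attr, value)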
--- openpype/hosts/maya/api/lib.py | 62 +++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index de9a9da911..bd403ad340 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2525,8 +2525,19 @@ def load_capture_preset(data=None): if key == 'ssaoEnable': if preset[id][key] is True: temp_options2['ssaoEnable'] = True + temp_options2['ssaoSamples'] = preset[id][key] else: temp_options2['ssaoEnable'] = False + temp_options2['ssaoSamples'] = preset[id][key] + + if key == 'ssaoAmount': + temp_options2['ssaoAmount'] = preset[id][key] + + if key == 'ssaoRadius': + temp_options2['ssaoRadius'] = preset[id][key] + + if key == 'ssaoFilterRadius': + temp_options2['ssaoFilterRadius'] = preset[id][key] if key == 'alphaCut': temp_options2['transparencyAlgorithm'] = 5 @@ -2535,6 +2546,42 @@ def load_capture_preset(data=None): if key == 'headsUpDisplay': temp_options['headsUpDisplay'] = True + if key == 'hwFogEnable': + if preset[id][key] is True: + temp_options2['hwFogEnable'] = True + else: + temp_options2['hwFogEnable'] = False + + if key == 'hwFogStart': + temp_options2['hwFogStart'] = preset[id][key] + + if key == 'hwFogEnd': + temp_options2['hwFogEnd'] = preset[id][key] + + if key == 'hwFogAlpha': + temp_options2['hwFogAlpha'] = preset[id][key] + + if key == 'hwFogFalloff': + temp_options2['hwFogFalloff'] = preset[id][key] + + if key == 'motionBlurEnable': + if preset[id][key] is True: + temp_options2['motionBlurEnable'] = True + else: + temp_options2['motionBlurEnable'] = False + + if key == 'motionBlurSampleCount': + temp_options2['motionBlurSampleCount'] = preset[id][key] + + if key == 'motionBlurShutterOpenFraction': + temp_options2['motionBlurShutterOpenFraction'] = preset[id][key] + + if key == 'lineAAEnable': + if preset[id][key] is True: + temp_options2['lineAAEnable'] = True + else: + temp_options2['lineAAEnable'] = False + else: temp_options[str(key)] = preset[id][key] @@ -2544,7 +2591,20 @@ def load_capture_preset(data=None): 'gpuCacheDisplayFilter', 'multiSample', 'ssaoEnable', - 'textureMaxResolution' + 'ssaoSamples', + 'ssaoAmount', + 'ssaoFilterRadius', + 'ssaoRadius', + 'hwFogEnable', + 'hwFogStart', + 'hwFogEnd', + 'hwFogAlpha', + 'hwFogFalloff', + 'textureMaxResolution', + 'motionBlurEnable', + 'motionBlurSampleCount', + 'motionBlurShutterOpenFraction', + 'lineAAEnable', ]: temp_options.pop(key, None) From f5e3f56981660fd6131cf80d802cb6ed382a6915 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 12:31:50 +0300 Subject: [PATCH 170/785] Append AA flag as schema key. 
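lineAAEnable toggles Viewport 2.0 line anti-aliasing and usually goes together with the multi-sample toggle the preset already handles. A quick manual check in a Maya session, assuming the default hardwareRenderingGlobals node:

    # Assumed quick check; both attributes sit on hardwareRenderingGlobals.
    from maya import cmds
    cmds.setAttr("hardwareRenderingGlobals.lineAAEnable", 1)
    cmds.setAttr("hardwareRenderingGlobals.multiSampleEnable", 1)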
--- .../schemas/schema_maya_capture.json | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index f8dba0be4b..2c5aed8a67 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -233,6 +233,14 @@ { "type": "splitter" }, + { + "type": "boolean", + "key": "lineAAEnable", + "label": "Enable Anti-Aliasing" + }, + { + "type": "splitter" + }, { "type": "boolean", "key": "ssaoEnable", @@ -313,7 +321,10 @@ { "type": "number", "key": "motionBlurShutterOpenFraction", - "label": "Shutter Open Fraction" + "label": "Shutter Open Fraction", + "decimal": 3, + "minimum": 0.01, + "maximum": 32 }, { "type": "splitter" From f497956c5fc31657f9798137c303b359004046c9 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 12:31:58 +0300 Subject: [PATCH 171/785] Adjust schema defaults. --- openpype/settings/defaults/project_settings/maya.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 437adbc1f0..48a34068db 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -500,6 +500,7 @@ "shadows": true, "textures": true, "twoSidedLighting": true, + "lineAAEnable": true, "ssaoEnable": true, "ssaoAmount": 0, "ssaoRadius": 0, @@ -512,7 +513,7 @@ "hwFogAlpha": 0, "motionBlurEnable": true, "motionBlurSampleCount": 8, - "motionBlurShutterOpenFraction": 0, + "motionBlurShutterOpenFraction": 0.01, "cameras": false, "clipGhosts": false, "controlVertices": false, From 98b912f0b36634c345faaf0e617385460a402af1 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 12:32:15 +0300 Subject: [PATCH 172/785] Change key check placement. 
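Moving ssaoSamples into its own key check stops the sample count from being overwritten with the boolean value of the ssaoEnable flag. Most of these keys are plain pass-throughs, so an equivalent, purely illustrative way to express them (not what this patch does) would be a set-based copy:

    # Illustrative alternative only; the patch keeps explicit per-key checks.
    PASSTHROUGH_KEYS = {
        "ssaoAmount", "ssaoRadius", "ssaoFilterRadius", "ssaoSamples",
        "hwFogStart", "hwFogEnd", "hwFogAlpha",
        "motionBlurSampleCount", "motionBlurShutterOpenFraction",
    }
    if key in PASSTHROUGH_KEYS:
        temp_options2[key] = preset[id][key]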
--- openpype/hosts/maya/api/lib.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index bd403ad340..12cbac2a32 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2525,10 +2525,11 @@ def load_capture_preset(data=None): if key == 'ssaoEnable': if preset[id][key] is True: temp_options2['ssaoEnable'] = True - temp_options2['ssaoSamples'] = preset[id][key] else: temp_options2['ssaoEnable'] = False - temp_options2['ssaoSamples'] = preset[id][key] + + if key == 'ssaoSamples': + temp_options2['ssaoSamples'] = preset[id][key] if key == 'ssaoAmount': temp_options2['ssaoAmount'] = preset[id][key] @@ -2549,20 +2550,16 @@ def load_capture_preset(data=None): if key == 'hwFogEnable': if preset[id][key] is True: temp_options2['hwFogEnable'] = True + temp_options2['hwFogStart'] = preset[id][key] + temp_options2['hwFogEnd'] = preset[id][key] + temp_options2['hwFogAlpha'] = preset[id][key] + temp_options2['hwFogFalloff'] = preset[id][key] else: temp_options2['hwFogEnable'] = False - - if key == 'hwFogStart': - temp_options2['hwFogStart'] = preset[id][key] - - if key == 'hwFogEnd': - temp_options2['hwFogEnd'] = preset[id][key] - - if key == 'hwFogAlpha': - temp_options2['hwFogAlpha'] = preset[id][key] - - if key == 'hwFogFalloff': - temp_options2['hwFogFalloff'] = preset[id][key] + temp_options2['hwFogStart'] = preset[id][key] + temp_options2['hwFogEnd'] = preset[id][key] + temp_options2['hwFogAlpha'] = preset[id][key] + temp_options2['hwFogFalloff'] = preset[id][key] if key == 'motionBlurEnable': if preset[id][key] is True: From ae897ed2901de70da53ff16bfc5a6d3fa1bc1432 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 13:07:38 +0300 Subject: [PATCH 173/785] Append Fog Color Key to schema. --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 2c5aed8a67..08207824b1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -302,6 +302,11 @@ "key": "hwFogAlpha", "label": "Enable Fog Alpha" }, + { + "type": "color", + "key": "hwFogColor", + "label": "Fog Color" + }, { "type": "splitter" }, From 4c1bba042d69bfbc807f2aea81a004fe27593163 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 13:08:13 +0300 Subject: [PATCH 174/785] Append Fog Color to functionk, fix loop bug. 
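For reference, Maya keeps the hardware fog colour as separate float channels in the 0-1 range, while the settings "color" field hands over a single list. A hedged sketch of the conversion that would be needed at some point, assuming the widget stores the colour as a 0-255 RGBA list (as the default added in the next commit suggests):

    # Assumed conversion from a 0-255 RGBA list to the per-channel
    # floats expected by hardwareRenderingGlobals.
    def split_fog_color(rgba):
        red, green, blue = (channel / 255.0 for channel in rgba[:3])
        return {"hwFogColorR": red, "hwFogColorG": green, "hwFogColorB": blue}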
--- openpype/hosts/maya/api/lib.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 12cbac2a32..4b76757e97 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2550,16 +2550,24 @@ def load_capture_preset(data=None): if key == 'hwFogEnable': if preset[id][key] is True: temp_options2['hwFogEnable'] = True - temp_options2['hwFogStart'] = preset[id][key] - temp_options2['hwFogEnd'] = preset[id][key] - temp_options2['hwFogAlpha'] = preset[id][key] - temp_options2['hwFogFalloff'] = preset[id][key] + else: temp_options2['hwFogEnable'] = False - temp_options2['hwFogStart'] = preset[id][key] - temp_options2['hwFogEnd'] = preset[id][key] - temp_options2['hwFogAlpha'] = preset[id][key] - temp_options2['hwFogFalloff'] = preset[id][key] + + if key == 'hwFogStart': + temp_options2['hwFogStart'] = preset[id][key] + + if key == 'hwFogEnd': + temp_options2['hwFogEnd'] = preset[id][key] + + if key == 'hwFogAlpha': + temp_options2['hwFogAlpha'] = preset[id][key] + + if key == 'hwFogFalloff': + temp_options2['hwFogFalloff'] = int(preset[id][key]) + + if key == 'hwFogColor': + temp_options2['hwFogColor'] = preset[id][key] if key == 'motionBlurEnable': if preset[id][key] is True: @@ -2597,6 +2605,7 @@ def load_capture_preset(data=None): 'hwFogEnd', 'hwFogAlpha', 'hwFogFalloff', + 'hwFogColor', 'textureMaxResolution', 'motionBlurEnable', 'motionBlurSampleCount', From bb339e0bbd9828a650f213d8839fecf7ca0f3ff7 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 23 Jun 2022 13:08:26 +0300 Subject: [PATCH 175/785] Append schema default for color. --- openpype/settings/defaults/project_settings/maya.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 48a34068db..874e23400e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -511,6 +511,12 @@ "hwFogStart": 0, "hwFogEnd": 0, "hwFogAlpha": 0, + "hwFogColor": [ + 158, + 53, + 53, + 255 + ], "motionBlurEnable": true, "motionBlurSampleCount": 8, "motionBlurShutterOpenFraction": 0.01, From f2ef34c1a2b5dc531c476f5661a79bd8dfeb7d15 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Thu, 23 Jun 2022 19:18:28 +0300 Subject: [PATCH 176/785] Update openpype/hosts/maya/api/lib.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Simplify key test. 
Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/hosts/maya/api/lib.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 4b76757e97..b9f23a2c0e 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2548,11 +2548,7 @@ def load_capture_preset(data=None): temp_options['headsUpDisplay'] = True if key == 'hwFogEnable': - if preset[id][key] is True: - temp_options2['hwFogEnable'] = True - - else: - temp_options2['hwFogEnable'] = False + temp_options2['hwFogEnable'] = preset[id][key] or False if key == 'hwFogStart': temp_options2['hwFogStart'] = preset[id][key] From 3af840f8881432634f776a904655be32536de000 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 04:00:33 +0300 Subject: [PATCH 177/785] Add Color Options --- openpype/hosts/maya/api/lib.py | 14 ++++++++--- .../defaults/project_settings/maya.json | 9 +++---- .../schemas/schema_maya_capture.json | 25 ++++++++++++++++--- 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 4b76757e97..c6585f5a8f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2566,8 +2566,14 @@ def load_capture_preset(data=None): if key == 'hwFogFalloff': temp_options2['hwFogFalloff'] = int(preset[id][key]) - if key == 'hwFogColor': - temp_options2['hwFogColor'] = preset[id][key] + if key == 'hwFogColorR': + temp_options2['hwFogColorR'] = preset[id][key] + + if key == 'hwFogColorG': + temp_options2['hwFogColorG'] = preset[id][key] + + if key == 'hwFogColorB': + temp_options2['hwFogColorB'] = preset[id][key] if key == 'motionBlurEnable': if preset[id][key] is True: @@ -2605,7 +2611,9 @@ def load_capture_preset(data=None): 'hwFogEnd', 'hwFogAlpha', 'hwFogFalloff', - 'hwFogColor', + 'hwFogColorR', + 'hwFogColorG', + 'hwFogColorB', 'textureMaxResolution', 'motionBlurEnable', 'motionBlurSampleCount', diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 874e23400e..7057160a40 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -511,12 +511,9 @@ "hwFogStart": 0, "hwFogEnd": 0, "hwFogAlpha": 0, - "hwFogColor": [ - 158, - 53, - 53, - 255 - ], + "hwFogColorR": 0, + "hwFogColorG": 0, + "hwFogColorB": 0, "motionBlurEnable": true, "motionBlurSampleCount": 8, "motionBlurShutterOpenFraction": 0.01, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 08207824b1..42685623ce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -303,9 +303,28 @@ "label": "Enable Fog Alpha" }, { - "type": "color", - "key": "hwFogColor", - "label": "Fog Color" + "type": "number", + "key": "hwFogColorR", + "label": "Fog Color R", + "decimal": 2, + "minimum": 0, + "maximum": 1 + }, + { + "type": "number", + "key": "hwFogColorG", + "label": "Fog Color G", + "decimal": 2, + "minimum": 0, + "maximum": 1 + }, + { + "type": "number", + "key": "hwFogColorB", + "label": "Fog Color B", + "decimal": 2, + "minimum": 0, + "maximum": 1 }, { "type": "splitter" From d0f1f897fcfb67623d21dd87a25e969ab03180a7 Mon Sep 17 
00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 11:18:20 +0300 Subject: [PATCH 178/785] Append SSAO Radius minimum. --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 42685623ce..d9d565943f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -259,7 +259,10 @@ { "type": "number", "key": "ssaoFilterRadius", - "label": "SSAO Filter Radius" + "label": "SSAO Filter Radius", + "decimal": 0, + "minimum": 1, + "maximum": 32 }, { "type": "number", From 54b8056238250d8699b2c59bfafd13947a88ff4d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 11:19:25 +0300 Subject: [PATCH 179/785] Adjust defaults. --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 7057160a40..293648385b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -504,7 +504,7 @@ "ssaoEnable": true, "ssaoAmount": 0, "ssaoRadius": 0, - "ssaoFilterRadius": 0, + "ssaoFilterRadius": 1, "ssaoSamples": 8, "hwFogEnable": true, "hwFogFalloff": "0", From 96fefa32c039fee52bc2120ab3c8c23bb63d8de6 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 12:50:20 +0300 Subject: [PATCH 180/785] Add Fog Density --- .../projects_schema/schemas/schema_maya_capture.json | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index d9d565943f..6e5eb43dd0 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -290,6 +290,11 @@ { "2": "Exponential Squared"} ] }, + { + "type": "number", + "key": "hwFogDensity", + "label": "Fog Density" + }, { "type": "number", "key": "hwFogStart", @@ -303,7 +308,7 @@ { "type": "number", "key": "hwFogAlpha", - "label": "Enable Fog Alpha" + "label": "Fog Alpha" }, { "type": "number", From 66cce36da5646553e6ffde4ec278433c47aea202 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 12:50:30 +0300 Subject: [PATCH 181/785] Adjust defaults to Maya Values --- .../defaults/project_settings/maya.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 293648385b..bd8d1fecf5 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -502,21 +502,21 @@ "twoSidedLighting": true, "lineAAEnable": true, "ssaoEnable": true, - "ssaoAmount": 0, - "ssaoRadius": 0, - "ssaoFilterRadius": 1, - "ssaoSamples": 8, + "ssaoAmount": 1, + "ssaoRadius": 16, + "ssaoFilterRadius": 16, + "ssaoSamples": 16, "hwFogEnable": true, "hwFogFalloff": "0", "hwFogStart": 0, - "hwFogEnd": 0, + "hwFogEnd": 100, "hwFogAlpha": 0, - "hwFogColorR": 
0, - "hwFogColorG": 0, - "hwFogColorB": 0, + "hwFogColorR": 1.0, + "hwFogColorG": 1.0, + "hwFogColorB": 1.0, "motionBlurEnable": true, "motionBlurSampleCount": 8, - "motionBlurShutterOpenFraction": 0.01, + "motionBlurShutterOpenFraction": 0.2, "cameras": false, "clipGhosts": false, "controlVertices": false, From 5b9168b421ed91870beb11bccc2dc9bf71136370 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 12:51:54 +0300 Subject: [PATCH 182/785] Append density default --- openpype/settings/defaults/project_settings/maya.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index bd8d1fecf5..bf6c18bb95 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -508,6 +508,7 @@ "ssaoSamples": 16, "hwFogEnable": true, "hwFogFalloff": "0", + "hwFogDensity": 0, "hwFogStart": 0, "hwFogEnd": 100, "hwFogAlpha": 0, From 76cf10a61186d38a4dd1f4c3bc33edce7d605e06 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 13:08:47 +0300 Subject: [PATCH 183/785] Add missing hwFogDensity key. --- openpype/hosts/maya/api/lib.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 166f348319..924dc03729 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2537,6 +2537,9 @@ def load_capture_preset(data=None): if key == 'ssaoRadius': temp_options2['ssaoRadius'] = preset[id][key] + if key == 'hwFogDensity': + temp_options2['hwFogDensity'] = preset[id][key] + if key == 'ssaoFilterRadius': temp_options2['ssaoFilterRadius'] = preset[id][key] @@ -2610,6 +2613,7 @@ def load_capture_preset(data=None): 'hwFogColorR', 'hwFogColorG', 'hwFogColorB', + 'hwFogDensity', 'textureMaxResolution', 'motionBlurEnable', 'motionBlurSampleCount', From 0486b99a3438f8b70635e06087a0bbc9f9676b1d Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 24 Jun 2022 14:14:41 +0300 Subject: [PATCH 184/785] Change hwFogDensity into float key. 
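hwFogDensity is a normalized 0-1 value in Maya, so the schema entry needs decimal precision rather than an integer field. A quick way to confirm the expected range in a Maya session (assuming the default node):

    # Assumed check of the attribute range on hardwareRenderingGlobals.
    from maya import cmds
    print(cmds.attributeQuery("hwFogDensity",
                              node="hardwareRenderingGlobals",
                              range=True))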
--- .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 6e5eb43dd0..a87918878d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -293,7 +293,10 @@ { "type": "number", "key": "hwFogDensity", - "label": "Fog Density" + "label": "Fog Density", + "decimal": 2, + "minimum": 0, + "maximum": 1 }, { "type": "number", From a080eb00ece263ddb645f7f562934fd8f5364073 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 24 Jun 2022 17:05:45 +0200 Subject: [PATCH 185/785] Convert basic families from old standalone publiser to the new one --- .../project_settings/traypublisher.json | 184 +++++++++++++++++- 1 file changed, 182 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 0b54cfd39e..da5fb2e8b5 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -8,8 +8,8 @@ "default_variants": [ "Main" ], - "description": "Publish workfile backup", - "detailed_description": "", + "description": "Backup of a working scene", + "detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.", "allow_sequences": true, "extensions": [ ".ma", @@ -30,6 +30,186 @@ ".psb", ".aep" ] + }, + { + "family": "model", + "identifier": "", + "label": "Model", + "icon": "fa.cubes", + "default_variants": [ + "Main", + "Proxy", + "Sculpt" + ], + "description": "Clean models", + "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones. It should be ready to be loaded into other scenes as is.\n", + "allow_sequences": false, + "extensions": [ + ".ma", + ".mb", + ".obj", + ".abc", + ".fbx", + ".bgeo", + ".bgeogz", + ".bgeosc", + ".usd", + ".blend" + ] + }, + { + "family": "pointcache", + "identifier": "", + "label": "Pointcache", + "icon": "fa.gears", + "default_variants": [ + "Main" + ], + "description": "Geometry Caches", + "detailed_description": "Alembic or bgeo cache of animated data", + "allow_sequences": true, + "extensions": [ + ".abc", + ".bgeo", + ".bgeogz", + ".bgeosc" + ] + }, + { + "family": "plate", + "identifier": "", + "label": "Plate", + "icon": "mdi.camera-image", + "default_variants": [ + "Main", + "BG", + "Animatic", + "Reference", + "Offline" + ], + "description": "Footage Plates", + "detailed_description": "Any type of image seqeuence coming from outside of the studio. 
Usually camera footage, but could also be animatics used for reference.", + "allow_sequences": true, + "extensions": [ + ".exr", + ".png", + ".dpx", + ".jpg", + ".tiff", + ".tif", + ".mov", + ".mp4", + ".avi" + ] + }, + { + "family": "render", + "identifier": "", + "label": "Render", + "icon": "mdi.folder-multiple-image", + "default_variants": [], + "description": "Rendered images or video", + "detailed_description": "Sequence or single file renders", + "allow_sequences": true, + "extensions": [ + ".exr", + ".png", + ".dpx", + ".jpg", + ".tiff", + ".tif", + ".mov", + ".mp4", + ".avi" + ] + }, + { + "family": "camera", + "identifier": "", + "label": "Camera", + "icon": "fa.video-camera", + "default_variants": [], + "description": "3d Camera", + "detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.", + "allow_sequences": false, + "extensions": [ + ".abc", + ".ma", + ".hip", + ".blend", + ".fbx", + ".usd" + ] + }, + { + "family": "image", + "identifier": "", + "label": "Image", + "icon": "fa.image", + "default_variants": [ + "Reference", + "Texture", + "Concept", + "Background" + ], + "description": "Single image", + "detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. This is a fallback 2d family for everything that doesn't fit more specific family.", + "allow_sequences": false, + "extensions": [ + ".exr", + ".jpg", + ".dpx", + ".bmp", + ".tif", + ".tiff", + ".png", + ".psb", + ".psd" + ] + }, + { + "family": "vdb", + "identifier": "", + "label": "VDB Volumes", + "icon": "fa.cloud", + "default_variants": [], + "description": "Sparse volumetric data", + "detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids", + "allow_sequences": true, + "extensions": [ + ".vdb" + ] + }, + { + "family": "matchmove", + "identifier": "", + "label": "Matchmove", + "icon": "fa.empire", + "default_variants": [ + "Camera", + "Object", + "Mocap" + ], + "description": "Matchmoving script", + "detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data", + "allow_sequences": false, + "extensions": [] + }, + { + "family": "rig", + "identifier": "", + "label": "Rig", + "icon": "fa.wheelchair", + "default_variants": [], + "description": "CG rig file", + "detailed_description": "CG rigged character or prop. 
Rig should be clean of any extra data and directly loadable into it's respective application\t", + "allow_sequences": false, + "extensions": [ + ".ma", + ".blend", + ".hip", + ".hda" + ] } ] } \ No newline at end of file From 5ec1f8c1ccf094bf11d7f9e9bd722d4606e0d127 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 24 Jun 2022 17:47:28 +0200 Subject: [PATCH 186/785] add unreal texture and tweak model help --- .../defaults/project_settings/traypublisher.json | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index da5fb2e8b5..5afaaee78c 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -42,7 +42,7 @@ "Sculpt" ], "description": "Clean models", - "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones. It should be ready to be loaded into other scenes as is.\n", + "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ", "allow_sequences": false, "extensions": [ ".ma", @@ -210,6 +210,19 @@ ".hip", ".hda" ] + }, + { + "family": "simpleUnrealTexture", + "identifier": "", + "label": "Simple UE texture", + "icon": "fa.image", + "default_variants": [ + "" + ], + "description": "Simple Unreal Engine texture", + "detailed_description": "Texture files with Unreal Engine naming conventions", + "allow_sequences": false, + "extensions": [] } ] } \ No newline at end of file From f25c662c37ced86fde5c22764665c55b9268edea Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 25 Jun 2022 11:21:25 +0300 Subject: [PATCH 187/785] Replace `hwFogEnable` with `fogging` flag. 
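The enable toggle is routed through temp_options rather than temp_options2 because "fogging" is a modelEditor flag, while the remaining hwFog* values stay as hardwareRenderingGlobals attributes. A hedged sketch of that split, with names assumed from the surrounding code:

    # Sketch of the assumed split: panel flag vs. viewport 2.0 attributes.
    from maya import cmds

    def enable_hardware_fog(panel, preset):
        cmds.modelEditor(panel, edit=True, fogging=preset.get("fogging", False))
        cmds.setAttr("hardwareRenderingGlobals.hwFogDensity",
                     preset.get("hwFogDensity", 0.0))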
--- openpype/hosts/maya/api/lib.py | 5 ++--- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 924dc03729..cd41ba3ffd 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2550,8 +2550,8 @@ def load_capture_preset(data=None): if key == 'headsUpDisplay': temp_options['headsUpDisplay'] = True - if key == 'hwFogEnable': - temp_options2['hwFogEnable'] = preset[id][key] or False + if key == 'fogging': + temp_options['fogging'] = preset[id][key] or False if key == 'hwFogStart': temp_options2['hwFogStart'] = preset[id][key] @@ -2605,7 +2605,6 @@ def load_capture_preset(data=None): 'ssaoAmount', 'ssaoFilterRadius', 'ssaoRadius', - 'hwFogEnable', 'hwFogStart', 'hwFogEnd', 'hwFogAlpha', diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index bf6c18bb95..77bc8118df 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -506,7 +506,7 @@ "ssaoRadius": 16, "ssaoFilterRadius": 16, "ssaoSamples": 16, - "hwFogEnable": true, + "fogging": true, "hwFogFalloff": "0", "hwFogDensity": 0, "hwFogStart": 0, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index a87918878d..2323fbaba0 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -277,7 +277,7 @@ }, { "type": "boolean", - "key": "hwFogEnable", + "key": "fogging", "label": "Enable Hardware Fog" }, { From 9c4791b169e3f867fb23857d4215bec00a5a4ec7 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 11:41:56 +0300 Subject: [PATCH 188/785] Update openpype/settings/defaults/project_settings/maya.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 7dcefeff3f..ceac9ed814 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -35,7 +35,7 @@ "default_render_image_folder": "", "aov_separator": "underscore", "arnold_renderer": { - "image_prefix": "", + "image_prefix": "maya///_", "image_format": "exr", "multilayer_exr": true, "tiled": true, From f395e659d66f551822206e6fe202e3b7cf8485ee Mon Sep 17 00:00:00 2001 From: "Allan I. 
A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 11:42:06 +0300 Subject: [PATCH 189/785] Update openpype/settings/defaults/project_settings/maya.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ceac9ed814..b76d0444f3 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -43,7 +43,7 @@ "additional_options": {} }, "vray_renderer": { - "image_prefix": "", + "image_prefix": "maya///", "engine": "1", "image_format": "png", "aov_list": [], From ecdade9ff325bbe14285487c2b03cd24af7df472 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 11:42:17 +0300 Subject: [PATCH 190/785] Update openpype/settings/defaults/project_settings/maya.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index b76d0444f3..555c7c62a0 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -50,7 +50,7 @@ "additional_options": {} }, "redshift_renderer": { - "image_prefix": "", + "image_prefix": "maya///", "primary_gi_engine": "0", "secondary_gi_engine": "0", "image_format": "iff", From b74e144dbb6d9fda8ac701d957d7c9fa98005139 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 12:15:13 +0300 Subject: [PATCH 191/785] Update openpype/settings/defaults/project_settings/maya.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 77bc8118df..2f1dca6978 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -506,7 +506,7 @@ "ssaoRadius": 16, "ssaoFilterRadius": 16, "ssaoSamples": 16, - "fogging": true, + "fogging": false, "hwFogFalloff": "0", "hwFogDensity": 0, "hwFogStart": 0, From 800a2f90d2baaf59ad662a42a920012fac827931 Mon Sep 17 00:00:00 2001 From: "Allan I. 
A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 12:15:21 +0300 Subject: [PATCH 192/785] Update openpype/settings/defaults/project_settings/maya.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 2f1dca6978..117b260853 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -515,7 +515,7 @@ "hwFogColorR": 1.0, "hwFogColorG": 1.0, "hwFogColorB": 1.0, - "motionBlurEnable": true, + "motionBlurEnable": false, "motionBlurSampleCount": 8, "motionBlurShutterOpenFraction": 0.2, "cameras": false, From dd5cef560a44a513ae3395fd84f6c02c1adffff8 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 28 Jun 2022 05:56:36 +0300 Subject: [PATCH 193/785] Adjust setting position --- .../schemas/schema_maya_capture.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 2323fbaba0..0a63315622 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -204,14 +204,6 @@ { "type": "splitter" }, - { - "type": "number", - "key": "multiSample", - "label": "Anti Aliasing Samples", - "decimal": 0, - "minimum": 0, - "maximum": 32 - }, { "type": "splitter" }, @@ -238,6 +230,14 @@ "key": "lineAAEnable", "label": "Enable Anti-Aliasing" }, + { + "type": "number", + "key": "multiSample", + "label": "Anti Aliasing Samples", + "decimal": 0, + "minimum": 0, + "maximum": 32 + }, { "type": "splitter" }, From e9f67f8747cad536c591884701fc441f177d3574 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 28 Jun 2022 05:56:51 +0300 Subject: [PATCH 194/785] Adjust some defaults. 
--- openpype/settings/defaults/project_settings/maya.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 117b260853..bb7719dc30 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -496,19 +496,19 @@ "override_viewport_options": true, "displayLights": "default", "textureMaxResolution": 1024, - "multiSample": 4, "shadows": true, "textures": true, "twoSidedLighting": true, "lineAAEnable": true, - "ssaoEnable": true, + "multiSample": 8, + "ssaoEnable": false, "ssaoAmount": 1, "ssaoRadius": 16, "ssaoFilterRadius": 16, "ssaoSamples": 16, "fogging": false, "hwFogFalloff": "0", - "hwFogDensity": 0, + "hwFogDensity": 0.0, "hwFogStart": 0, "hwFogEnd": 100, "hwFogAlpha": 0, From 9500e08a7d66646b07047b4bad6cf8e80bb99631 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jun 2022 16:49:50 +0200 Subject: [PATCH 195/785] update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 7eaef69873..ea5b20eb69 100644 --- a/.gitignore +++ b/.gitignore @@ -102,5 +102,8 @@ website/.docusaurus .poetry/ .python-version +.editorconfig +.pre-commit-config.yaml +mypy.ini tools/run_eventserver.* From de5c4bffc46e5e2e93c6ea7b993e48d4b79da0a8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jun 2022 16:50:22 +0200 Subject: [PATCH 196/785] adding shotgrid back to a realease --- .../plugins/publish/submit_maya_deadline.py | 1 + openpype/modules/shotgrid/README.md | 19 ++ openpype/modules/shotgrid/__init__.py | 5 + openpype/modules/shotgrid/lib/__init__.py | 0 openpype/modules/shotgrid/lib/const.py | 1 + openpype/modules/shotgrid/lib/credentials.py | 125 +++++++++++ openpype/modules/shotgrid/lib/record.py | 20 ++ openpype/modules/shotgrid/lib/settings.py | 18 ++ .../publish/collect_shotgrid_entities.py | 100 +++++++++ .../publish/collect_shotgrid_session.py | 123 +++++++++++ .../publish/integrate_shotgrid_publish.py | 77 +++++++ .../publish/integrate_shotgrid_version.py | 92 ++++++++ .../plugins/publish/validate_shotgrid_user.py | 38 ++++ openpype/modules/shotgrid/server/README.md | 5 + openpype/modules/shotgrid/shotgrid_module.py | 58 +++++ .../tests/shotgrid/lib/test_credentials.py | 34 +++ .../shotgrid/tray/credential_dialog.py | 201 ++++++++++++++++++ .../modules/shotgrid/tray/shotgrid_tray.py | 75 +++++++ openpype/resources/app_icons/shotgrid.png | Bin 0 -> 45744 bytes .../defaults/project_settings/shotgrid.json | 22 ++ .../defaults/system_settings/modules.json | 8 +- openpype/settings/entities/__init__.py | 2 + openpype/settings/entities/enum_entity.py | 114 ++++++---- .../schemas/projects_schema/schema_main.json | 4 + .../schema_project_shotgrid.json | 98 +++++++++ .../schemas/schema_representation_tags.json | 3 + .../schemas/system_schema/schema_modules.json | 54 +++++ poetry.lock | 16 ++ pyproject.toml | 1 + 29 files changed, 1276 insertions(+), 38 deletions(-) create mode 100644 openpype/modules/shotgrid/README.md create mode 100644 openpype/modules/shotgrid/__init__.py create mode 100644 openpype/modules/shotgrid/lib/__init__.py create mode 100644 openpype/modules/shotgrid/lib/const.py create mode 100644 openpype/modules/shotgrid/lib/credentials.py create mode 100644 openpype/modules/shotgrid/lib/record.py create mode 100644 openpype/modules/shotgrid/lib/settings.py create mode 100644 
openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py create mode 100644 openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py create mode 100644 openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py create mode 100644 openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py create mode 100644 openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py create mode 100644 openpype/modules/shotgrid/server/README.md create mode 100644 openpype/modules/shotgrid/shotgrid_module.py create mode 100644 openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py create mode 100644 openpype/modules/shotgrid/tray/credential_dialog.py create mode 100644 openpype/modules/shotgrid/tray/shotgrid_tray.py create mode 100644 openpype/resources/app_icons/shotgrid.png create mode 100644 openpype/settings/defaults/project_settings/shotgrid.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 9964e3c646..dff80e62b9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -519,6 +519,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "OPENPYPE_SG_USER", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", diff --git a/openpype/modules/shotgrid/README.md b/openpype/modules/shotgrid/README.md new file mode 100644 index 0000000000..cbee0e9bf4 --- /dev/null +++ b/openpype/modules/shotgrid/README.md @@ -0,0 +1,19 @@ +## Shotgrid Module + +### Pre-requisites + +Install and launch a [shotgrid leecher](https://github.com/Ellipsanime/shotgrid-leecher) server + +### Quickstart + +The goal of this tutorial is to synchronize an already existing shotgrid project with OpenPype. 
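+
+Under the hood the publish plugins talk to Shotgrid through `shotgun_api3`,
+roughly as in the sketch below (illustrative only; the URL, script name and
+API key are placeholders for the values you configure in the settings):
+
+```python
+import shotgun_api3
+
+session = shotgun_api3.Shotgun(
+    base_url="https://yourstudio.shotgunstudio.com",  # placeholder URL
+    script_name="openpype_script",                    # placeholder script name
+    api_key="xxxxxxxx",                               # placeholder API key
+    sudo_as_login="artist.login",  # the user logged in through the Tray
+)
+session.preferences_read()  # simple connectivity check used by the collector
+```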
+ +- Activate the shotgrid module in the **system settings** and inform the shotgrid leecher server API url + +- Create a new OpenPype project with the **project manager** + +- Inform the shotgrid authentication infos (url, script name, api key) and the shotgrid project ID related to this OpenPype project in the **project settings** + +- Use the batch interface (Tray > shotgrid > Launch batch), select your project and click "batch" + +- You can now access your shotgrid entities within the **avalon launcher** and publish informations to shotgrid with **pyblish** diff --git a/openpype/modules/shotgrid/__init__.py b/openpype/modules/shotgrid/__init__.py new file mode 100644 index 0000000000..f1337a9492 --- /dev/null +++ b/openpype/modules/shotgrid/__init__.py @@ -0,0 +1,5 @@ +from .shotgrid_module import ( + ShotgridModule, +) + +__all__ = ("ShotgridModule",) diff --git a/openpype/modules/shotgrid/lib/__init__.py b/openpype/modules/shotgrid/lib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/shotgrid/lib/const.py b/openpype/modules/shotgrid/lib/const.py new file mode 100644 index 0000000000..2a34800fac --- /dev/null +++ b/openpype/modules/shotgrid/lib/const.py @@ -0,0 +1 @@ +MODULE_NAME = "shotgrid" diff --git a/openpype/modules/shotgrid/lib/credentials.py b/openpype/modules/shotgrid/lib/credentials.py new file mode 100644 index 0000000000..337c4f6ecb --- /dev/null +++ b/openpype/modules/shotgrid/lib/credentials.py @@ -0,0 +1,125 @@ + +from urllib.parse import urlparse + +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSecureRegistry, OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.record import Credentials + + +def _get_shotgrid_secure_key(hostname, key): + """Secure item key for entered hostname.""" + return f"shotgrid/{hostname}/{key}" + + +def _get_secure_value_and_registry( + hostname, + name, +): + key = _get_shotgrid_secure_key(hostname, name) + registry = OpenPypeSecureRegistry(key) + return registry.get_item(name, None), registry + + +def get_shotgrid_hostname(shotgrid_url): + + if not shotgrid_url: + raise Exception("Shotgrid url cannot be a null") + valid_shotgrid_url = ( + f"//{shotgrid_url}" if "//" not in shotgrid_url else shotgrid_url + ) + return urlparse(valid_shotgrid_url).hostname + + +# Credentials storing function (using keyring) + + +def get_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + if not hostname: + return None + login_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + return Credentials(login_value, password_value) + + +def save_credentials(login, password, shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + _, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + _, password_registry = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + clear_credentials(shotgrid_url) + login_registry.set_item(Credentials.login_key_prefix(), login) + password_registry.set_item(Credentials.password_key_prefix(), password) + + +def clear_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + login_value, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, password_registry = 
_get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + + if login_value is not None: + login_registry.delete_item(Credentials.login_key_prefix()) + + if password_value is not None: + password_registry.delete_item(Credentials.password_key_prefix()) + + +# Login storing function (using json) + + +def get_local_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None + + +def save_local_login(login): + reg = OpenPypeSettingsRegistry() + reg.set_item("shotgrid_login", login) + + +def clear_local_login(): + reg = OpenPypeSettingsRegistry() + reg.delete_item("shotgrid_login") + + +def check_credentials( + login, + password, + shotgrid_url, +): + + if not shotgrid_url or not login or not password: + return False + try: + session = shotgun_api3.Shotgun( + shotgrid_url, + login=login, + password=password, + ) + session.preferences_read() + session.close() + except AuthenticationFault: + return False + return True diff --git a/openpype/modules/shotgrid/lib/record.py b/openpype/modules/shotgrid/lib/record.py new file mode 100644 index 0000000000..f62f4855d5 --- /dev/null +++ b/openpype/modules/shotgrid/lib/record.py @@ -0,0 +1,20 @@ + +class Credentials: + login = None + password = None + + def __init__(self, login, password) -> None: + super().__init__() + self.login = login + self.password = password + + def is_empty(self): + return not (self.login and self.password) + + @staticmethod + def login_key_prefix(): + return "login" + + @staticmethod + def password_key_prefix(): + return "password" diff --git a/openpype/modules/shotgrid/lib/settings.py b/openpype/modules/shotgrid/lib/settings.py new file mode 100644 index 0000000000..924099f04b --- /dev/null +++ b/openpype/modules/shotgrid/lib/settings.py @@ -0,0 +1,18 @@ +from openpype.api import get_system_settings, get_project_settings +from openpype.modules.shotgrid.lib.const import MODULE_NAME + + +def get_shotgrid_project_settings(project): + return get_project_settings(project).get(MODULE_NAME, {}) + + +def get_shotgrid_settings(): + return get_system_settings().get("modules", {}).get(MODULE_NAME, {}) + + +def get_shotgrid_servers(): + return get_shotgrid_settings().get("shotgrid_settings", {}) + + +def get_leecher_backend_url(): + return get_shotgrid_settings().get("leecher_backend_url") diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py new file mode 100644 index 0000000000..0b03ac2e5d --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py @@ -0,0 +1,100 @@ +import os + +import pyblish.api +from openpype.lib.mongo import OpenPypeMongoConnection + + +class CollectShotgridEntities(pyblish.api.ContextPlugin): + """Collect shotgrid entities according to the current context""" + + order = pyblish.api.CollectorOrder + 0.499 + label = "Shotgrid entities" + + def process(self, context): + + avalon_project = context.data.get("projectEntity") + avalon_asset = context.data.get("assetEntity") + avalon_task_name = os.getenv("AVALON_TASK") + + self.log.info(avalon_project) + self.log.info(avalon_asset) + + sg_project = _get_shotgrid_project(context) + sg_task = _get_shotgrid_task( + avalon_project, + avalon_asset, + avalon_task_name + ) + sg_entity = _get_shotgrid_entity(avalon_project, avalon_asset) + + if sg_project: + context.data["shotgridProject"] = sg_project + self.log.info( + "Collected correspondig shotgrid 
project : {}".format( + sg_project + ) + ) + + if sg_task: + context.data["shotgridTask"] = sg_task + self.log.info( + "Collected correspondig shotgrid task : {}".format(sg_task) + ) + + if sg_entity: + context.data["shotgridEntity"] = sg_entity + self.log.info( + "Collected correspondig shotgrid entity : {}".format(sg_entity) + ) + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + + sg = context.data.get("shotgridSession") + return sg.find_one("Version", filters, []) + + +def _get_shotgrid_collection(project): + client = OpenPypeMongoConnection.get_mongo_client() + return client.get_database("shotgrid_openpype").get_collection(project) + + +def _get_shotgrid_project(context): + shotgrid_project_id = context.data["project_settings"].get( + "shotgrid_project_id") + if shotgrid_project_id: + return {"type": "Project", "id": shotgrid_project_id} + return {} + + +def _get_shotgrid_task(avalon_project, avalon_asset, avalon_task): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_task_hierarchy_row = sg_col.find_one( + { + "type": "Task", + "_id": {"$regex": "^" + avalon_task + "_[0-9]*"}, + "parent": {"$regex": ".*," + avalon_asset["name"] + ","}, + } + ) + if shotgrid_task_hierarchy_row: + return {"type": "Task", "id": shotgrid_task_hierarchy_row["src_id"]} + return {} + + +def _get_shotgrid_entity(avalon_project, avalon_asset): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_entity_hierarchy_row = sg_col.find_one( + {"_id": avalon_asset["name"]} + ) + if shotgrid_entity_hierarchy_row: + return { + "type": shotgrid_entity_hierarchy_row["type"], + "id": shotgrid_entity_hierarchy_row["src_id"], + } + return {} diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py new file mode 100644 index 0000000000..9d5d2271bf --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py @@ -0,0 +1,123 @@ +import os + +import pyblish.api +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.settings import ( + get_shotgrid_servers, + get_shotgrid_project_settings, +) + + +class CollectShotgridSession(pyblish.api.ContextPlugin): + """Collect shotgrid session using user credentials""" + + order = pyblish.api.CollectorOrder + label = "Shotgrid user session" + + def process(self, context): + + certificate_path = os.getenv("SHOTGUN_API_CACERTS") + if certificate_path is None or not os.path.exists(certificate_path): + self.log.info( + "SHOTGUN_API_CACERTS does not contains a valid \ + path: {}".format( + certificate_path + ) + ) + certificate_path = get_shotgrid_certificate() + self.log.info("Get Certificate from shotgrid_api") + + if not os.path.exists(certificate_path): + self.log.error( + "Could not find certificate in shotgun_api3: \ + {}".format( + certificate_path + ) + ) + return + + set_shotgrid_certificate(certificate_path) + self.log.info("Set Certificate: {}".format(certificate_path)) + + avalon_project = os.getenv("AVALON_PROJECT") + + shotgrid_settings = get_shotgrid_project_settings(avalon_project) + self.log.info("shotgrid settings: {}".format(shotgrid_settings)) + shotgrid_servers_settings = 
get_shotgrid_servers() + self.log.info( + "shotgrid_servers_settings: {}".format(shotgrid_servers_settings) + ) + + shotgrid_server = shotgrid_settings.get("shotgrid_server", "") + if not shotgrid_server: + self.log.error( + "No Shotgrid server found, please choose a credential" + "in script name and script key in OpenPype settings" + ) + + shotgrid_server_setting = shotgrid_servers_settings.get( + shotgrid_server, {} + ) + shotgrid_url = shotgrid_server_setting.get("shotgrid_url", "") + + shotgrid_script_name = shotgrid_server_setting.get( + "shotgrid_script_name", "" + ) + shotgrid_script_key = shotgrid_server_setting.get( + "shotgrid_script_key", "" + ) + if not shotgrid_script_name and not shotgrid_script_key: + self.log.error( + "No Shotgrid api credential found, please enter " + "script name and script key in OpenPype settings" + ) + + login = get_login() or os.getenv("OPENPYPE_SG_USER") + + if not login: + self.log.error( + "No Shotgrid login found, please " + "login to shotgrid withing openpype Tray" + ) + + session = shotgun_api3.Shotgun( + base_url=shotgrid_url, + script_name=shotgrid_script_name, + api_key=shotgrid_script_key, + sudo_as_login=login, + ) + + try: + session.preferences_read() + except AuthenticationFault: + raise ValueError( + "Could not connect to shotgrid {} with user {}".format( + shotgrid_url, login + ) + ) + + self.log.info( + "Logged to shotgrid {} with user {}".format(shotgrid_url, login) + ) + context.data["shotgridSession"] = session + context.data["shotgridUser"] = login + + +def get_shotgrid_certificate(): + shotgun_api_path = os.path.dirname(shotgun_api3.__file__) + return os.path.join(shotgun_api_path, "lib", "certifi", "cacert.pem") + + +def set_shotgrid_certificate(certificate): + os.environ["SHOTGUN_API_CACERTS"] = certificate + + +def get_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py new file mode 100644 index 0000000000..cfd2d10fd9 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py @@ -0,0 +1,77 @@ +import os +import pyblish.api + + +class IntegrateShotgridPublish(pyblish.api.InstancePlugin): + """ + Create published Files from representations and add it to version. If + representation is tagged add shotgrid review, it will add it in + path to movie for a movie file or path to frame for an image sequence. 
+ """ + + order = pyblish.api.IntegratorOrder + 0.499 + label = "Shotgrid Published Files" + + def process(self, instance): + + context = instance.context + + self.sg = context.data.get("shotgridSession") + + shotgrid_version = instance.data.get("shotgridVersion") + + for representation in instance.data.get("representations", []): + + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if representation.get("tags", []): + continue + + published_file = self._find_existing_publish( + code, context, shotgrid_version + ) + + published_file_data = { + "project": context.data.get("shotgridProject"), + "code": code, + "entity": context.data.get("shotgridEntity"), + "task": context.data.get("shotgridTask"), + "version": shotgrid_version, + "path": {"local_path": local_path}, + } + if not published_file: + published_file = self._create_published(published_file_data) + self.log.info( + "Create Shotgrid PublishedFile: {}".format(published_file) + ) + else: + self.sg.update( + published_file["type"], + published_file["id"], + published_file_data, + ) + self.log.info( + "Update Shotgrid PublishedFile: {}".format(published_file) + ) + + if instance.data["family"] == "image": + self.sg.upload_thumbnail( + published_file["type"], published_file["id"], local_path + ) + instance.data["shotgridPublishedFile"] = published_file + + def _find_existing_publish(self, code, context, shotgrid_version): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["version", "is", shotgrid_version], + ["code", "is", code], + ] + return self.sg.find_one("PublishedFile", filters, []) + + def _create_published(self, published_file_data): + + return self.sg.create("PublishedFile", published_file_data) diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py new file mode 100644 index 0000000000..a1b7140e22 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py @@ -0,0 +1,92 @@ +import os +import pyblish.api + + +class IntegrateShotgridVersion(pyblish.api.InstancePlugin): + """Integrate Shotgrid Version""" + + order = pyblish.api.IntegratorOrder + 0.497 + label = "Shotgrid Version" + + sg = None + + def process(self, instance): + + context = instance.context + self.sg = context.data.get("shotgridSession") + + # TODO: Use path template solver to build version code from settings + anatomy = instance.data.get("anatomyData", {}) + code = "_".join( + [ + anatomy["project"]["code"], + anatomy["parent"], + anatomy["asset"], + anatomy["task"]["name"], + "v{:03}".format(int(anatomy["version"])), + ] + ) + + version = self._find_existing_version(code, context) + + if not version: + version = self._create_version(code, context) + self.log.info("Create Shotgrid version: {}".format(version)) + else: + self.log.info("Use existing Shotgrid version: {}".format(version)) + + data_to_update = {} + status = context.data.get("intent", {}).get("value") + if status: + data_to_update["sg_status_list"] = status + + for representation in instance.data.get("representations", []): + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if "shotgridreview" in representation.get("tags", []): + + if representation["ext"] in ["mov", "avi"]: + self.log.info( + "Upload review: {} for version shotgrid {}".format( + 
local_path, version.get("id") + ) + ) + self.sg.upload( + "Version", + version.get("id"), + local_path, + field_name="sg_uploaded_movie", + ) + + data_to_update["sg_path_to_movie"] = local_path + + elif representation["ext"] in ["jpg", "png", "exr", "tga"]: + path_to_frame = local_path.replace("0000", "#") + data_to_update["sg_path_to_frames"] = path_to_frame + + self.log.info("Update Shotgrid version with {}".format(data_to_update)) + self.sg.update("Version", version["id"], data_to_update) + + instance.data["shotgridVersion"] = version + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + return self.sg.find_one("Version", filters, []) + + def _create_version(self, code, context): + + version_data = { + "project": context.data.get("shotgridProject"), + "sg_task": context.data.get("shotgridTask"), + "entity": context.data.get("shotgridEntity"), + "code": code, + } + + return self.sg.create("Version", version_data) diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py new file mode 100644 index 0000000000..c14c980e2a --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -0,0 +1,38 @@ +import pyblish.api +import openpype.api + + +class ValidateShotgridUser(pyblish.api.ContextPlugin): + """ + Check if user is valid and have access to the project. + """ + + label = "Validate Shotgrid User" + order = openpype.api.ValidateContentsOrder + + def process(self, context): + sg = context.data.get("shotgridSession") + + login = context.data.get("shotgridUser") + self.log.info("Login shotgrid set in OpenPype is {}".format(login)) + project = context.data.get("shotgridProject") + self.log.info("Current shotgun project is {}".format(project)) + + if not (login and sg and project): + raise KeyError() + + user = sg.find_one("HumanUser", [["login", "is", login]], ["projects"]) + + self.log.info(user) + self.log.info(login) + user_projects_id = [p["id"] for p in user.get("projects", [])] + if not project.get("id") in user_projects_id: + raise PermissionError( + "Login {} don't have access to the project {}".format( + login, project + ) + ) + + self.log.info( + "Login {} have access to the project {}".format(login, project) + ) diff --git a/openpype/modules/shotgrid/server/README.md b/openpype/modules/shotgrid/server/README.md new file mode 100644 index 0000000000..15e056ff3e --- /dev/null +++ b/openpype/modules/shotgrid/server/README.md @@ -0,0 +1,5 @@ + +### Shotgrid server + +Please refer to the external project that covers Openpype/Shotgrid communication: + - https://github.com/Ellipsanime/shotgrid-leecher diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py new file mode 100644 index 0000000000..5644f0c35f --- /dev/null +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -0,0 +1,58 @@ +import os + +from openpype_interfaces import ( + ITrayModule, + IPluginPaths, + ILaunchHookPaths, +) + +from openpype.modules import OpenPypeModule + +SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class ShotgridModule( + OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths +): + leecher_manager_url = None + name = "shotgrid" + enabled = False + project_id = None + tray_wrapper = None + + def 
initialize(self, modules_settings): + shotgrid_settings = modules_settings.get(self.name, dict()) + self.enabled = shotgrid_settings.get("enabled", False) + self.leecher_manager_url = shotgrid_settings.get( + "leecher_manager_url", "" + ) + + def connect_with_modules(self, enabled_modules): + pass + + def get_global_environments(self): + return {"PROJECT_ID": self.project_id} + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(SHOTGRID_MODULE_DIR, "plugins", "publish") + ] + } + + def get_launch_hook_paths(self): + return os.path.join(SHOTGRID_MODULE_DIR, "hooks") + + def tray_init(self): + from .tray.shotgrid_tray import ShotgridTrayWrapper + + self.tray_wrapper = ShotgridTrayWrapper(self) + + def tray_start(self): + return self.tray_wrapper.validate() + + def tray_exit(self, *args, **kwargs): + return self.tray_wrapper + + def tray_menu(self, tray_menu): + return self.tray_wrapper.tray_menu(tray_menu) diff --git a/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py new file mode 100644 index 0000000000..1f78cf77c9 --- /dev/null +++ b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py @@ -0,0 +1,34 @@ +import pytest +from assertpy import assert_that + +import openpype.modules.shotgrid.lib.credentials as sut + + +def test_missing_shotgrid_url(): + with pytest.raises(Exception) as ex: + # arrange + url = "" + # act + sut.get_shotgrid_hostname(url) + # assert + assert_that(ex).is_equal_to("Shotgrid url cannot be a null") + + +def test_full_shotgrid_url(): + # arrange + url = "https://shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") + + +def test_incomplete_shotgrid_url(): + # arrange + url = "shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py new file mode 100644 index 0000000000..9d841d98be --- /dev/null +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -0,0 +1,201 @@ +import os +from Qt import QtCore, QtWidgets, QtGui + +from openpype import style +from openpype import resources +from openpype.modules.shotgrid.lib import settings, credentials + + +class CredentialsDialog(QtWidgets.QDialog): + SIZE_W = 450 + SIZE_H = 200 + + _module = None + _is_logged = False + url_label = None + login_label = None + password_label = None + url_input = None + login_input = None + password_input = None + input_layout = None + login_button = None + buttons_layout = None + main_widget = None + + login_changed = QtCore.Signal() + + def __init__(self, module, parent=None): + super(CredentialsDialog, self).__init__(parent) + + self._module = module + self._is_logged = False + + self.setWindowTitle("OpenPype - Shotgrid Login") + + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) + + self.setWindowFlags( + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint + ) + self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) + self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100)) + self.setStyleSheet(style.load_stylesheet()) + + self.ui_init() + + def ui_init(self): + self.url_label = QtWidgets.QLabel("Shotgrid server:") + self.login_label = 
QtWidgets.QLabel("Login:") + self.password_label = QtWidgets.QLabel("Password:") + + self.url_input = QtWidgets.QComboBox() + # self.url_input.setReadOnly(True) + + self.login_input = QtWidgets.QLineEdit() + self.login_input.setPlaceholderText("login") + + self.password_input = QtWidgets.QLineEdit() + self.password_input.setPlaceholderText("password") + self.password_input.setEchoMode(QtWidgets.QLineEdit.Password) + + self.error_label = QtWidgets.QLabel("") + self.error_label.setStyleSheet("color: red;") + self.error_label.setWordWrap(True) + self.error_label.hide() + + self.input_layout = QtWidgets.QFormLayout() + self.input_layout.setContentsMargins(10, 15, 10, 5) + + self.input_layout.addRow(self.url_label, self.url_input) + self.input_layout.addRow(self.login_label, self.login_input) + self.input_layout.addRow(self.password_label, self.password_input) + self.input_layout.addRow(self.error_label) + + self.login_button = QtWidgets.QPushButton("Login") + self.login_button.setToolTip("Log in shotgrid instance") + self.login_button.clicked.connect(self._on_shotgrid_login_clicked) + + self.logout_button = QtWidgets.QPushButton("Logout") + self.logout_button.setToolTip("Log out shotgrid instance") + self.logout_button.clicked.connect(self._on_shotgrid_logout_clicked) + + self.buttons_layout = QtWidgets.QHBoxLayout() + self.buttons_layout.addWidget(self.logout_button) + self.buttons_layout.addWidget(self.login_button) + + self.main_widget = QtWidgets.QVBoxLayout(self) + self.main_widget.addLayout(self.input_layout) + self.main_widget.addLayout(self.buttons_layout) + self.setLayout(self.main_widget) + + def show(self, *args, **kwargs): + super(CredentialsDialog, self).show(*args, **kwargs) + self._fill_shotgrid_url() + self._fill_shotgrid_login() + + def _fill_shotgrid_url(self): + servers = settings.get_shotgrid_servers() + + if servers: + for _, v in servers.items(): + self.url_input.addItem("{}".format(v.get('shotgrid_url'))) + self._valid_input(self.url_input) + self.login_button.show() + self.logout_button.show() + enabled = True + else: + self.set_error("Ask your admin to add shotgrid server in settings") + self._invalid_input(self.url_input) + self.login_button.hide() + self.logout_button.hide() + enabled = False + + self.login_input.setEnabled(enabled) + self.password_input.setEnabled(enabled) + + def _fill_shotgrid_login(self): + login = credentials.get_local_login() + + if login: + self.login_input.setText(login) + + def _clear_shotgrid_login(self): + self.login_input.setText("") + self.password_input.setText("") + + def _on_shotgrid_login_clicked(self): + login = self.login_input.text().strip() + password = self.password_input.text().strip() + missing = [] + + if login == "": + missing.append("login") + self._invalid_input(self.login_input) + + if password == "": + missing.append("password") + self._invalid_input(self.password_input) + + url = self.url_input.currentText() + if url == "": + missing.append("url") + self._invalid_input(self.url_input) + + if len(missing) > 0: + self.set_error("You didn't enter {}".format(" and ".join(missing))) + return + + # if credentials.check_credentials( + # login=login, + # password=password, + # shotgrid_url=url, + # ): + credentials.save_local_login( + login=login + ) + os.environ['OPENPYPE_SG_USER'] = login + self._on_login() + + self.set_error("CANT LOGIN") + + def _on_shotgrid_logout_clicked(self): + credentials.clear_local_login() + del os.environ['OPENPYPE_SG_USER'] + self._clear_shotgrid_login() + self._on_logout() + + def set_error(self, 
msg): + self.error_label.setText(msg) + self.error_label.show() + + def _on_login(self): + self._is_logged = True + self.login_changed.emit() + self._close_widget() + + def _on_logout(self): + self._is_logged = False + self.login_changed.emit() + + def _close_widget(self): + self.hide() + + def _valid_input(self, input_widget): + input_widget.setStyleSheet("") + + def _invalid_input(self, input_widget): + input_widget.setStyleSheet("border: 1px solid red;") + + def login_with_credentials( + self, url, login, password + ): + verification = credentials.check_credentials(url, login, password) + if verification: + credentials.save_credentials(login, password, False) + self._module.set_credentials_to_env(login, password) + self.set_credentials(login, password) + self.login_changed.emit() + return verification diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py new file mode 100644 index 0000000000..4038d77b03 --- /dev/null +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -0,0 +1,75 @@ +import os +import webbrowser + +from Qt import QtWidgets + +from openpype.modules.shotgrid.lib import credentials +from openpype.modules.shotgrid.tray.credential_dialog import ( + CredentialsDialog, +) + + +class ShotgridTrayWrapper: + module = None + credentials_dialog = None + logged_user_label = None + + def __init__(self, module): + self.module = module + self.credentials_dialog = CredentialsDialog(module) + self.credentials_dialog.login_changed.connect(self.set_login_label) + self.logged_user_label = QtWidgets.QAction("") + self.logged_user_label.setDisabled(True) + self.set_login_label() + + def show_batch_dialog(self): + if self.module.leecher_manager_url: + webbrowser.open(self.module.leecher_manager_url) + + def show_connect_dialog(self): + self.show_credential_dialog() + + def show_credential_dialog(self): + self.credentials_dialog.show() + self.credentials_dialog.activateWindow() + self.credentials_dialog.raise_() + + def set_login_label(self): + login = credentials.get_local_login() + if login: + self.logged_user_label.setText("{}".format(login)) + else: + self.logged_user_label.setText( + "No User logged in {0}".format(login) + ) + + def tray_menu(self, tray_menu): + # Add login to user menu + menu = QtWidgets.QMenu("Shotgrid", tray_menu) + show_connect_action = QtWidgets.QAction("Connect to Shotgrid", menu) + show_connect_action.triggered.connect(self.show_connect_dialog) + menu.addAction(self.logged_user_label) + menu.addSeparator() + menu.addAction(show_connect_action) + tray_menu.addMenu(menu) + + # Add manager to Admin menu + for m in tray_menu.findChildren(QtWidgets.QMenu): + if m.title() == "Admin": + shotgrid_manager_action = QtWidgets.QAction( + "Shotgrid manager", menu + ) + shotgrid_manager_action.triggered.connect( + self.show_batch_dialog + ) + m.addAction(shotgrid_manager_action) + + def validate(self): + login = credentials.get_local_login() + + if not login: + self.show_credential_dialog() + else: + os.environ["OPENPYPE_SG_USER"] = login + + return True diff --git a/openpype/resources/app_icons/shotgrid.png b/openpype/resources/app_icons/shotgrid.png new file mode 100644 index 0000000000000000000000000000000000000000..6d0cc047f9ed86e0db45ea557404ab7edb2f5bf2 GIT binary patch literal 45744 zcmeFZby$?$_BTFshcwb4-Q6W2;m}e_=g={Pbcd86B8`BQfRspxfP{o1((Mq^HT2N$ z?em=Toaf6q$Lo7t@B6!c|2UUAv-jF-?Y%#Ht+m%)`@W6U(zu6*eH$AD0^zBuDC&Sf zNWf1d5GFeC^~j^t7Wl$)Q!(-cfpCa_{zU?1W>bJb;)f7DL#QG6zJ!&lGmnL}tECN( 
zIZe#&^TvwIB>z+MAS4RV4^(l-QJZ8~7>&+?4lf-yQ5+g+-#j3JM2}K^@$PG@Oggd; z>1$4DN0BCum~&u#We3&#TW+hOETIw1jrJ`tIxZwOR#cMq4Ez9YD*ja z#Pry+`8Gt$r-VglqDMh|WC2QGbAIg-i*aHlGSwb%chRj@NM}E8 zn|_r7LC|F)*Ag|U7LqLQ$?k#<^W?*{X*iNLc_%k}&k3ECge1D;Eq4@<>2jon`1xh_1h}7fidCpd-fz|4@;B<-oD5m8CIQO~YXMT`XB6?xAM{CI zHm4S?vRMSQr3Q1AfG`F}<=-6IEwN6Em%)&~$yEGq<+pvUkY7~Xb45h{z-Q%74k%YL zsR4LYiMTesl0RLce+mHPdyQ=wc$v|h=02*qO^*aUU^?sOOsW^Ctazaf3`wtK37L0p z!$sLaZC%iBXjv%ZcwD694n2@97IGelEj~9jou;{Nz8epf zQ_8WUA{+~!7k2c!IW$pX1*iSa`D}pjmeSkdO{9ZTw^|$T z!`t?TpZG&3y!FYc8qGJ%F_DvAg`M+LvOrd0e(DW#+_mz&nkB=YpxLJL=ekj;5{!_( zlDEp|=UL(z_G+7RXV>efcR>0lyCTFP@*WsXs3FF~&iAc0&oFG+3J-7egxmOf0zn;I}~A$Tgz zX2ja=jtNxK&0&ZZyhx?$wmp0ex6KBrD%A5bC71uTB?y}Fhp`RpfY@iL-H#k3F2V=D z>#^K#(v>T70Vj6)QRhI`lVtybZ|_0p zY~$&UIs(01l8AVL8Hpq5(yhZoTyGQ*HkPS>S8Z5nOsfkK!4JuJ!yknhhROnR`p9Yi zz@|*|y${F8dwsgBk&&rt7F7L_EUiS6$0`eeL;d6wnR++dh0A>5kCE^jtaK(m*v*|| z&hJ5o5D)53EM6`z;!0c|(j0(&yccLkk5i)8MrbfJhFj7kw3=UUC6%BIi6~0ebZ_gH zSdZ_weB5WzaLK=mJQy~rX;sSbJTVC<2iE9z#L9h0Ss00Ax}*&vHu~HD2Iqy3|$)C zy{&0j`z|EU!yl*7@2iw5oHL3ivq*X*AY)IV5)A(9?~(M5A@5CyHq*npKV9a1v6l;S zOoI7v<^6k`)OJ)CYqB0*45&1v-nMx62rpl_yJjn_KFRcjHfEAx@B2ekKE8cbI`Nmv@Mxc70Qtf>X&xqC85Q0kE4^nr>uPz=F zy+gF`r{Y4+-vt6R|C8|So~4Ixyw=VQ9Q1>DTU-XRFLwUJ z*Tid&dYUbba zZ*!{PpnpBEfxylKlx-LT_-Jd>3o$riTo$15Pm^7zdckZGhb1c2ug#wsOE2BF!%ntSXBxCCZS{Eq30&5ch(fK%?|JgNpuN zd<#&;Xfzfaq+6 z{Kl_baEjJ_B(^7RWaTqoO`80tJsk{6W@n(V1(g5l{JBi6e`kQoI2flmTLmeB3Mqf6 z6YAn=>^C&|Tss*2Ak97<$hoKDdm7j6a${04aR?vRo;CfwJ;Mq5H>LuRndx{Y|V7F;lYp-Wq*Su`08V+4x`QMojy>%hyRbI z*~)J61&P8jz1$O4$UBZm9yr6DC3xW6fT&6~oO@j^?nzuIm>C(cb8K&)`EmzEN!Zsy z+IQ_?1>Bj@m9l%x`=i=u2Hcw;J#q33dm%giZo#4P-`{qJh3x& znPcM|V9%CeC-t14GI=lSMfjgP07>4Qtz@fQL}}nrF;dsxS%lGKc3yJ%OiKnPVgRyk}&A**lqz%J)7F`tEUx?D<&ol zKG<*el;Wsvkd`~jUN0Q{r<`~z9s;3i;vueN?#p=OtYU(B22>_5x~RO2pd}D7-P`$j z^A6?~#z#yM*CZ8QtW9XP?j(~xO9Y7zh6(QI2r8J}I25}7EoE(k)&h`}W_o%d3}Qs* zY!R~4>5DI^e7^XI{Ko^iWxxW6=>OXM0Wiib56)il1zF$*Di?L8?XIIj6x@-0vwOlj zLTb;VK**G`-C>z!?&QE#9OsN>H|0$c*XMn{sa40lZ44AtdA@$)Lv`Qq;LPxo;j*B? z8Ufq_fyP)@@}?0YX0lE9$|aVSIWW?diZeFe&L=TBIjo;G-=EHKw`16j{7OS>%uQb+ zgWsQoadOw1Qj$L}JEhvaf`_lm_G=AQBh@m!3@a-V-E)<{>byZyfV^nM|%88Qd(X^713PAA8`6=o__s-ko^uft=<*($D1lB{?M552N~igFt4?16Sgj4;>~-iD zR`@YN=o{GhuA43dGQ{To$VCbQtu|dq(`$n9t$!gP&Tny<`tNj^<%2rZPfId1-b*dS zJ)>)mix@^u@a?Alej>0>o~rbZx;pkZN`;a#?{Rc$dA`%NI9tn8f9ZJz*^lz(fTKH7 zVBwdTu!1zY;*z(XZ=*4+@$nOT7dr2GR%Z1!$`?h!imb2lbY*piMafKqTaPfy3eOa9 zPjUBZqm{xo%5I_ciq?QHayZMoST@afALAkqMrvvBSNlRIw{gog=)1@8!~cm{fh}%b z6PifJ(8LG|e6%$8`2v;Wg0oiPy{Fe=NyAtPTp_;;Uhg%n!SLvPJtU`zX7d?8X3J~H zm(<}lR5W!hrrMulFE(6b@|Au9a9Pe9XmN^Q;snaPHtgA9qr63?@#fJ5v1b^qcfe>W z{P|)+1=hPvQ2)05op0f;an~uk>1o_8Ugfs2a{<`cZAmsHX$kUP+ zo$cij7sctD5rz2B)suB%Y_=?@!&Rq$u%uIRcQ0C9spB$UTk4oRwI)H1uUqOoJS*4m z1dP1>)Kj7E9z@O08Lytpt^pN1|!a72l&r4nGaC|Fw&b zy%$|gyu2FjSlo>dH~VUsjos_JU+M%;!^*A+>Mrwgtnl`(3{q+;btDkTSBlpy{m2GG zA<-;NAMY8IK-0Sjvx&%e6*l8W2MWDgAV6UwPTUIufw72Yonw70|9N^ z9Dz?S&4{25IX|*{kl73oa#dcDC*|?qY>N{J;(i{L28~|?YzJG9{$BfGs2BqVXmoiX ziD%L)?qKJWfN?^V#VXYCcx_W$X*~CsVI%mDD+%e?s6M{1JDQhY@;(ZwcaKsW1P@Mx z+1@O7e(OMNgOVUx)WYG9)1*RQQ`j$Ib9^FhT*;2Mf&lYgoj~eU5URI4@kKtqT)s## zjcIge%G1GJiYycfsX!ykXA9BmZND*G`7T1wj8@$ME)sdiIjd2lSbX0Ss;BxL2!;Ea zPlS@zq3WVMEf`|LF+=J^1LK#_Q`VOs(xc^giT6*Y-3t%sFF$S{J;<2xeQ_>MzGB5? 
zGGmz;;-cbp(s#t!94W+}4Yt6S@ol?X4=C|dvUmljU*i*gMB@{RTyLk`KN8t$yfZt2 z5dQX;U{6OBn5sf0{eGpce4jn&aF~15(D?`WOw}_4Ke?hFpi42bPe%eH?tGjY>HpF` zBuComGbV@Xzm5*OapnFsxi;z7V!E#DI)1(P$P`fNC*Bz52KNAxLMNUk+S#R0R;&G1 zWI6h%L_qK3vMJO=wvOBp-u%pc{M0umOb$7v?!9U?m1`l zGJ>&Dg?!m9NG>C*_kL84^Y#ilG$GnQ1?G)|&Vj&Q9#Ogjktcg02XgmOfZht%m;D9;8;9l4)g>lpC> zkXVeL5q?&5)a&5q`6~1tZQuAr`Jv0;d%t~6zh^p4ujRpu@#UCIawXO8w1^Q*zaC2( z$t=%n`{0Lu3ieN5ew{lU8V+gG31*xtlg>_MH6G^m-w%Vs)d;vV-xT}q{NvUsNj~E0 zRK88_L@5ZRUMySId0zH?@>L8&`YVE z9r`@OKI3`{VCEj=eDh(k@Ws{~ruq3G=J^FO$nQwQ*!V1v0Dt*Y5Or+mcJPZQ$Cvhg zBhP3{5fcQ;MwM=>x{C*51g6?xQL;6eTQ~m@m&xb^ZIr$l75q1(%BVpKsIyZ+lx$FuYn9~9+o&d^8AlSgsMVoEs!X$~L zyZOaf$Mjhic4pEyG=xoejlT6%u6*jcXo83lpyb162-J{)^VTH83PbI^ebi~`^ zX{%`9RHb*Zynlb%L{-_}3Qi6YZW$XO}Nbc(m}mZ=sV!LmZ9f4YhRr_=kKnV846_7YWhf5OoF1B+Tcq z)N$Szp&*c&F39E~bbh(!Qo@^Hm;Y`R9_V)rwxC`JTd!74b>=Cn1atYvZd8)qQi&pg z0_bTknnU3>xomkgy%8|vvjW@V_M=8eZXY4^Tr@?`v&m)@0Fvo*I14HIPAwVmaMDhR z-T)2((nEeOsiwZz-yo-y;6OR+`pRT{?ceD0-43Qvjc1tU_xqj5ovnE6LOVN-{|25B zk8RVubutl;6X-19_t|`nE8pMNZ+sEI70jNlEuce|?p&Z!87BtG9NKg{{L1=(IXYea z40L8_KlyAk--!k7Ij*mERHgrimutbMQ$RwEXSq%qvrpT#;0_)*V@cRXtA8v^*93H1 zl9JK5-hc`T3wW5E3!~JH@;3DIw$jVFPIQMw<|=Fj_m6s?)heH=iRpL&BlQ%^V<~;b zl$2D|kY6~mlAR)gb$^G=REPj#0D*FVPA23L*$o>>$Mnk3K#K0>t$I-&KMCL^$zty4 zgGZ0JL@~)#BHuRT{CK0#93)vqV!8jDT!K#VY&D2_?YAY4J9e`56cWq0AgbjqQ^{)u zCrq19I)ahw&4~UF%x$x&o8=wEoeoIsq>99t>9MwZA%(x$I=*f<-UcCFA4*i8y@=2C zf{`1e);m=3V#WNr=Lj#@@u5+RCTd6^IYiJu>cd8aQuXzcH;yGh8jT;h{qd1~g{+eT z+LM}VLC|+!q`P6sR*p%TOh}=_h!Go3y%=15PO-DbMUK|Mh~`c|fOfM_QE>O=w^I^@ zm=1x*(t6w#fjtTPYrpKQ(qN}@se38g%SU1Be$xic>%*U@dHVknd?%#bFLnMcl^ZS- z?6U7^dsqOdnSR_V<}5$tX)2)qus=%2$95QzjwKP* zNM?$TBF51^v&W7+5V4YaoCKaD@0*{QA%iv8d2d#|zo9E=qA~1t;qG)@4+Z{;NN8_C zZs5&DvSj78OCp(lYi5{!AcxHMj$j~|z5HVqRt1vU2eLQKLeDe_r2r``myV^ zl$~F2AQOf(A@5#wYe)3#Flfzf8{#pzcB`=mlGFA2Zn=-5b;dRC+B=rW!Xd~`kH?($ zX!f!iS#ys-8yhlEf&U}+MecNG6r!Z^ez_l+1F$_lVm|=n8g08&^-x|h#M!BDDO;9O zP3B)^A_}{JG6*DmLSbnK%CWREF?0$sGzzM?jD<>WBer7UMa1>aE#sSv1z(R!zn0t> zs46o?{3wl-gV9tpbEiEC1)$6BMNn`nX-r3rLx)2cp5XV?9 z`unTX({&WAl=1u7VB;j7WFNzQCOIzVO+(k`0*gc=R*KY^vaHtw;>{E_CXt6@1KH~H zK14-qB}toO5ox5&fKv-xWy(J)=tOH^-Q)SvWe%TAG~_KI^4Zb&M7*L;`4>3inb`35 z$C&I%p1~k<9tQ8LCh0ugxvp&;^%Y`0Oirp~du2tgodxu(W;DKR0I;3*U}DY*wxvS<)ksN(?y#4FJNhTZ3%!pajfaM9I6U1(Zq?l1 zibWr1Dn6)n@rxoJsK^^(>F&f+<%G<<=MK1#l)@Z?);T{y%sM9Ur{`qV7=kARf;8s? zD(imw4Z$jpv{#{OS`@)#*2^-SzQa>JOe4M2|if>HD9OT-4{i~y(~pW zpMM6$%3s)gkw9uxAdo^}r88v&NnCMGM>yce08x>v#9GO_i+RARobGy#2B%b2iC2jptYAFiK-7*Ge}QNms~E?*{RSRZQnf&?-qoK?4^6SMd}7`J8ok^gb=CO-@O~A#o>73d{0>0sW-KYD@jrYJf8B6J*!GD^ z^K?W*lS%1Js>vl) zq)$y8@s@&J@t3pFeOL+^Y41$&Uu#5??}y5qsrW=lL*2iIo{j3a=sZdJ+LV#ZI78po zTSMi$87q9fG4F+LK4Jl#Any^(3}3QP$g9Ys?oDJwf6RK_01h9pd_Ku)Kq-YO(Bx9^ z+j?2f$RG5Qrf6$6A80nNGjs5n6^LFm_GFIqgTDrsZ{rNdqtmgH;Xm;0+=DDU@tmzw z)r@h^M+`Zgc@!1-(n_Sj-VF=^y-sy9pnxby?Kj4Y^bX4Az16q8V*_c=if)L-yTr`&I3 zmRn>#<+edaP=3v+~n5Be7rc7Xhn? 
zm=S}~~`DMeo>uRI zW%#tOverM48E!s?AD$%HW~=O5%^*P`&oqLhPqw@U*tSiY)u2>J~#E_)L$T%j)V51o`Jp`BndlFg4Opa5A{n|+`SCZ%)5-_`C` zc)c0VKcG*k_L=M#%(b8_t(o{*qXyLT$Ci!N?CF|?J;Y2kFVYz>;_m~*JXgy%?;| zL=x3kfcMseHqJx{^xttDkiae?f&+S4P&DPEkJi*}@^7bH3&Fj(Dy1F{Bn<)H=q%&tZD#AqSD?eBVY`FNO(1i@WF zS;3;tEN%L~pDZGSTy}jo-U{k?qYxlx64N*BlLlFFm^Cj3b~887y;k?*pLwuC`Bkf|ahecu{z)618E_1m0d+;Mq4 z@FG5J6*K2haQ$2-=K$$ItyQW^*wqA*b16;*`WLUA8o1G7JHE~msOHL`#Zg6Vjk*+E zilKrKxe?|3jr0$p_kkkNX*cjk9P0e5Dl&3qvMr#~?f{3lz%HB)gdLS^!;^py1Z^PD N8$~sR3OTc&{{b(fTU!7C literal 0 HcmV?d00001 diff --git a/openpype/settings/defaults/project_settings/shotgrid.json b/openpype/settings/defaults/project_settings/shotgrid.json new file mode 100644 index 0000000000..83b6f69074 --- /dev/null +++ b/openpype/settings/defaults/project_settings/shotgrid.json @@ -0,0 +1,22 @@ +{ + "shotgrid_project_id": 0, + "shotgrid_server": "", + "event": { + "enabled": false + }, + "fields": { + "asset": { + "type": "sg_asset_type" + }, + "sequence": { + "episode_link": "episode" + }, + "shot": { + "episode_link": "sg_episode", + "sequence_link": "sg_sequence" + }, + "task": { + "step": "step" + } + } +} diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 8cd4114cb0..9d8910689a 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -131,6 +131,12 @@ } } }, + "shotgrid": { + "enabled": false, + "leecher_manager_url": "http://127.0.0.1:3000", + "leecher_backend_url": "http://127.0.0.1:8090", + "shotgrid_settings": {} + }, "kitsu": { "enabled": false, "server": "" @@ -203,4 +209,4 @@ "linux": "" } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index a173e2454f..b2cb2204f4 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -107,6 +107,7 @@ from .enum_entity import ( TaskTypeEnumEntity, DeadlineUrlEnumEntity, AnatomyTemplatesEnumEntity, + ShotgridUrlEnumEntity ) from .list_entity import ListEntity @@ -171,6 +172,7 @@ __all__ = ( "ToolsEnumEntity", "TaskTypeEnumEntity", "DeadlineUrlEnumEntity", + "ShotgridUrlEnumEntity", "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..3b3dd47e61 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -1,10 +1,7 @@ import copy from .input_entities import InputEntity from .exceptions import EntitySchemaError -from .lib import ( - NOT_SET, - STRING_TYPE -) +from .lib import NOT_SET, STRING_TYPE class BaseEnumEntity(InputEntity): @@ -26,7 +23,7 @@ class BaseEnumEntity(InputEntity): for item in self.enum_items: key = tuple(item.keys())[0] if key in enum_keys: - reason = "Key \"{}\" is more than once in enum items.".format( + reason = 'Key "{}" is more than once in enum items.'.format( key ) raise EntitySchemaError(self, reason) @@ -34,7 +31,7 @@ class BaseEnumEntity(InputEntity): enum_keys.add(key) if not isinstance(key, STRING_TYPE): - reason = "Key \"{}\" has invalid type {}, expected {}.".format( + reason = 'Key "{}" has invalid type {}, expected {}.'.format( key, type(key), STRING_TYPE ) raise EntitySchemaError(self, reason) @@ -59,7 +56,7 @@ class BaseEnumEntity(InputEntity): for item in check_values: if item not in self.valid_keys: raise ValueError( - "{} Invalid value \"{}\". 
Expected one of: {}".format( + '{} Invalid value "{}". Expected one of: {}'.format( self.path, item, self.valid_keys ) ) @@ -84,7 +81,7 @@ class EnumEntity(BaseEnumEntity): self.valid_keys = set(all_keys) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) value_on_not_set = [] if enum_default: if not isinstance(enum_default, list): @@ -109,7 +106,7 @@ class EnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -152,6 +149,7 @@ class HostsEnumEntity(BaseEnumEntity): Host name is not the same as application name. Host name defines implementation instead of application name. """ + schema_types = ["hosts-enum"] all_host_names = [ "aftereffects", @@ -169,7 +167,7 @@ class HostsEnumEntity(BaseEnumEntity): "tvpaint", "unreal", "standalonepublisher", - "webpublisher" + "webpublisher", ] def _item_initialization(self): @@ -210,7 +208,7 @@ class HostsEnumEntity(BaseEnumEntity): self.valid_keys = valid_keys if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: for key in valid_keys: @@ -218,7 +216,7 @@ class HostsEnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -226,14 +224,10 @@ class HostsEnumEntity(BaseEnumEntity): def schema_validations(self): if self.hosts_filter: enum_len = len(self.enum_items) - if ( - enum_len == 0 - or (enum_len == 1 and self.use_empty_value) - ): - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) + if enum_len == 0 or (enum_len == 1 and self.use_empty_value): + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) reason = ( "All host names were removed after applying" " host filters. 
{}" @@ -246,24 +240,25 @@ class HostsEnumEntity(BaseEnumEntity): invalid_filters.add(item) if invalid_filters: - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) - expected_hosts = ", ".join([ - '"{}"'.format(item) - for item in self.all_host_names - ]) - self.log.warning(( - "Host filters containt invalid host names:" - " \"{}\" Expected values are {}" - ).format(joined_filters, expected_hosts)) + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) + expected_hosts = ", ".join( + ['"{}"'.format(item) for item in self.all_host_names] + ) + self.log.warning( + ( + "Host filters containt invalid host names:" + ' "{}" Expected values are {}' + ).format(joined_filters, expected_hosts) + ) super(HostsEnumEntity, self).schema_validations() class AppsEnumEntity(BaseEnumEntity): """Enum of applications for project anatomy attributes.""" + schema_types = ["apps-enum"] def _item_initialization(self): @@ -271,7 +266,7 @@ class AppsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -352,7 +347,7 @@ class ToolsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -409,10 +404,10 @@ class TaskTypeEnumEntity(BaseEnumEntity): def _item_initialization(self): self.multiselection = self.schema_data.get("multiselection", True) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) self.value_on_not_set = "" self.enum_items = [] @@ -507,7 +502,8 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): enum_items_list = [] for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( - {server_name: "{}: {}".format(server_name, url_entity.value)}) + {server_name: "{}: {}".format(server_name, url_entity.value)} + ) valid_keys.add(server_name) return enum_items_list, valid_keys @@ -530,6 +526,50 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): self._current_value = tuple(self.valid_keys)[0] +class ShotgridUrlEnumEntity(BaseEnumEntity): + schema_types = ["shotgrid_url-enum"] + + def _item_initialization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + shotgrid_settings = self.get_entity_from_path( + "system_settings/modules/shotgrid/shotgrid_settings" + ) + + valid_keys = set() + enum_items_list = [] + for server_name, settings in shotgrid_settings.items(): + enum_items_list.append( + { + server_name: "{}: {}".format( + server_name, settings["shotgrid_url"].value + ) + } + ) + valid_keys.add(server_name) + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs) + + self.enum_items, self.valid_keys = self._get_enum_values() + if not self.valid_keys: + self._current_value = "" + + elif self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] + + class 
AnatomyTemplatesEnumEntity(BaseEnumEntity): schema_types = ["anatomy-templates-enum"] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 6c07209de3..80b1baad1b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -62,6 +62,10 @@ "type": "schema", "name": "schema_project_ftrack" }, + { + "type": "schema", + "name": "schema_project_shotgrid" + }, { "type": "schema", "name": "schema_project_kitsu" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json new file mode 100644 index 0000000000..4faeca89f3 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json @@ -0,0 +1,98 @@ +{ + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "is_file": true, + "children": [ + { + "type": "number", + "key": "shotgrid_project_id", + "label": "Shotgrid project id" + }, + { + "type": "shotgrid_url-enum", + "key": "shotgrid_server", + "label": "Shotgrid Server" + }, + { + "type": "dict", + "key": "event", + "label": "Event Handler", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, + { + "type": "dict", + "key": "fields", + "label": "Fields Template", + "collapsible": true, + "children": [ + { + "type": "dict", + "key": "asset", + "label": "Asset", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "type", + "label": "Asset Type" + } + ] + }, + { + "type": "dict", + "key": "sequence", + "label": "Sequence", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + } + ] + }, + { + "type": "dict", + "key": "shot", + "label": "Shot", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + }, + { + "type": "text", + "key": "sequence_link", + "label": "Sequence link" + } + ] + }, + { + "type": "dict", + "key": "task", + "label": "Task", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "step", + "label": "Step link" + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index 484fbf9d07..a4b28f47bc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -13,6 +13,9 @@ { "ftrackreview": "Add review to Ftrack" }, + { + "shotgridreview": "Add review to Shotgrid" + }, { "delete": "Delete output" }, diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index d22b9016a7..952b38040c 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -48,6 +48,60 @@ "type": "schema", "name": "schema_kitsu" }, + { + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": 
"enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "leecher_manager_url", + "label": "Shotgrid Leecher Manager URL" + }, + { + "type": "text", + "key": "leecher_backend_url", + "label": "Shotgrid Leecher Backend URL" + }, + { + "type": "boolean", + "key": "filter_projects_by_login", + "label": "Filter projects by SG login" + }, + { + "type": "dict-modifiable", + "key": "shotgrid_settings", + "label": "Shotgrid Servers", + "object_type": { + "type": "dict", + "children": [ + { + "key": "shotgrid_url", + "label": "Server URL", + "type": "text" + }, + { + "key": "shotgrid_script_name", + "label": "Script Name", + "type": "text" + }, + { + "key": "shotgrid_script_key", + "label": "Script api key", + "type": "text" + } + ] + } + } + ] + }, { "type": "dict", "key": "timers_manager", diff --git a/poetry.lock b/poetry.lock index 7221e191ff..0033bc0d73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1375,6 +1375,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "shotgun-api3" +version = "3.3.3" +description = "Shotgun Python API" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/shotgunsoftware/python-api.git" +reference = "v3.3.3" +resolved_reference = "b9f066c0edbea6e0733242e18f32f75489064840" + [[package]] name = "six" version = "1.16.0" @@ -2820,6 +2835,7 @@ semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, diff --git a/pyproject.toml b/pyproject.toml index bd5d3ad89d..306c7206fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "2.0.*" +shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) 
jsonschema = "^2.6.0" From eedc67edcc37dcfeae0254e4e856ec4b7df409cf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 4 Jul 2022 16:28:59 +0200 Subject: [PATCH 197/785] use project entity from context instead of requery --- .../deadline/plugins/publish/submit_houdini_remote_publish.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index f834ae7e92..fdf67b51bc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -55,7 +55,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): scenename = os.path.basename(scene) # Get project code - project = legacy_io.find_one({"type": "project"}) + project = context.data["projectEntity"] code = project["data"].get("code", project["name"]) job_name = "{scene} [PUBLISH]".format(scene=scenename) From 5ba81e8c284e9ee38ceb84dae2aee25d054f8685 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 4 Jul 2022 16:37:36 +0200 Subject: [PATCH 198/785] use query function in submit to deadline --- .../plugins/publish/submit_publish_job.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 9dd1428a63..b098eaba8e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -11,6 +11,7 @@ import clique import pyblish.api import openpype.api +from openpype.client import get_representations from openpype.pipeline import ( get_representation_path, legacy_io, @@ -18,15 +19,23 @@ from openpype.pipeline import ( from openpype.pipeline.farm.patterning import match_aov_pattern -def get_resources(version, extension=None): +def get_resources(project_name, version, extension=None): """Get the files from the specific version.""" - query = {"type": "representation", "parent": version["_id"]} + + # TODO this functions seems to be weird + # - it's looking for representation with one extension or first (any) + # representation from a version? 
+ # - not sure how this should work, maybe it does for specific use cases + # but probably can't be used for all resources from 2D workflows + extensions = None if extension: - query["name"] = extension - - representation = legacy_io.find_one(query) - assert representation, "This is a bug" + extensions = [extension] + repre_docs = list(get_representations( + project_name, version_ids=[version["_id"]], extensions=extensions + )) + assert repre_docs, "This is a bug" + representation = repre_docs[0] directory = get_representation_path(representation) print("Source: ", directory) resources = sorted( @@ -330,13 +339,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.info("Preparing to copy ...") start = instance.data.get("frameStart") end = instance.data.get("frameEnd") + project_name = legacy_io.active_project() # get latest version of subset # this will stop if subset wasn't published yet version = openpype.api.get_latest_version(instance.data.get("asset"), instance.data.get("subset")) # get its files based on extension - subset_resources = get_resources(version, representation.get("ext")) + subset_resources = get_resources( + project_name, version, representation.get("ext") + ) r_col, _ = clique.assemble(subset_resources) # if override remove all frames we are expecting to be rendered From 927fe351a33be543a9d02d11b2924240ac2c380c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 4 Jul 2022 22:43:14 +0200 Subject: [PATCH 199/785] settings: adding editorial family --- .../project_settings/traypublisher.json | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 0b54cfd39e..e938384282 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -30,6 +30,24 @@ ".psb", ".aep" ] + }, + { + "family": "editorial", + "identifier": "", + "label": "Editorial", + "icon": "fa.file", + "default_variants": [ + "Main" + ], + "description": "Editorial files to generate shots.", + "detailed_description": "Supporting publishing new shots to project or updating already created. Publishing will create OTIO file.", + "allow_sequences": false, + "extensions": [ + ".edl", + ".xml", + ".aaf", + ".fcpxml" + ] } ] } \ No newline at end of file From 3e058c6e8ac79ebb9933d0ad02957b0467f3a578 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:20:00 +0200 Subject: [PATCH 200/785] Move IntegrateAsset --- openpype/plugins/publish/integrate.py | 832 ++++++++++++++++++++++++++ 1 file changed, 832 insertions(+) create mode 100644 openpype/plugins/publish/integrate.py diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py new file mode 100644 index 0000000000..6ad0849ff7 --- /dev/null +++ b/openpype/plugins/publish/integrate.py @@ -0,0 +1,832 @@ +import os +import logging +import sys +import copy +import clique +import six + +from bson.objectid import ObjectId +from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne +import pyblish.api + +import openpype.api +from openpype.modules import ModulesManager +from openpype.lib.profiles_filtering import filter_profiles +from openpype.lib.file_transaction import FileTransaction +from openpype.pipeline import legacy_io + +log = logging.getLogger(__name__) + + +def assemble(files): + """Convenience `clique.assemble` wrapper for files of a single collection. 
+ + Unlike `clique.assemble` this wrapper does not allow more than a single + Collection nor any remainder files. Errors will be raised when not only + a single collection is assembled. + + Returns: + clique.Collection: A single sequence Collection + + Raises: + ValueError: Error is raised when files do not result in a single + collected Collection. + + """ + # todo: move this to lib? + # Get the sequence as a collection. The files must be of a single + # sequence and have no remainder outside of the collections. + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble(files, + minimum_items=1, + patterns=patterns) + if not collections: + raise ValueError("No collections found in files: " + "{}".format(files)) + if remainder: + raise ValueError("Files found not detected as part" + " of a sequence: {}".format(remainder)) + if len(collections) > 1: + raise ValueError("Files in sequence are not part of a" + " single sequence collection: " + "{}".format(collections)) + return collections[0] + + +def get_instance_families(instance): + """Get all families of the instance""" + # todo: move this to lib? + family = instance.data.get("family") + families = [] + if family: + families.append(family) + + for _family in (instance.data.get("families") or []): + if _family not in families: + families.append(_family) + + return families + + +def get_frame_padded(frame, padding): + """Return frame number as string with `padding` amount of padded zeros""" + return "{frame:0{padding}d}".format(padding=padding, frame=frame) + + +def get_first_frame_padded(collection): + """Return first frame as padded number from `clique.Collection`""" + start_frame = next(iter(collection.indexes)) + return get_frame_padded(start_frame, padding=collection.padding) + + +def bulk_write(writes): + """Convenience function to bulk write into active project database""" + project = legacy_io.Session["AVALON_PROJECT"] + return legacy_io._database[project].bulk_write(writes) + + +class IntegrateAsset(pyblish.api.InstancePlugin): + """Register publish in the database and transfer files to destinations. + + Steps: + 1) Register the subset and version + 2) Transfer the representation files to the destination + 3) Register the representation + + Requires: + instance.data['representations'] - must be a list and each member + must be a dictionary with following data: + 'files': list of filenames for sequence, string for single file. + Only the filename is allowed, without the folder path. + 'stagingDir': "path/to/folder/with/files" + 'name': representation name (usually the same as extension) + 'ext': file extension + optional data + "frameStart" + "frameEnd" + 'fps' + "data": additional metadata for each representation. 
+ """ + + label = "Integrate Asset New" + order = pyblish.api.IntegratorOrder + families = ["workfile", + "pointcache", + "camera", + "animation", + "model", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "ass", + "vdbcache", + "scene", + "vrayproxy", + "vrayscene_layer", + "render", + "prerender", + "imagesequence", + "review", + "rendersetup", + "rig", + "plate", + "look", + "audio", + "yetiRig", + "yeticache", + "nukenodes", + "gizmo", + "source", + "matchmove", + "image", + "assembly", + "fbx", + "textures", + "action", + "harmony.template", + "harmony.palette", + "editorial", + "background", + "camerarig", + "redshiftproxy", + "effect", + "xgen", + "hda", + "usd", + "staticMesh", + "skeletalMesh", + "usdComposition", + "usdOverride", + "simpleUnrealTexture" + ] + exclude_families = ["clip", "render.farm"] + default_template_name = "publish" + + # Representation context keys that should always be written to + # the database even if not used by the destination template + db_representation_context_keys = [ + "project", "asset", "task", "subset", "version", "representation", + "family", "hierarchy", "username" + ] + + # Attributes set by settings + template_name_profiles = None + + def process(self, instance): + + # Exclude instances that also contain families from exclude families + families = set(get_instance_families(instance)) + exclude = families & set(self.exclude_families) + if exclude: + self.log.debug("Instance not integrated due to exclude " + "families found: {}".format(", ".join(exclude))) + return + + file_transactions = FileTransaction(log=self.log) + try: + self.register(instance, file_transactions) + except Exception: + # clean destination + # todo: preferably we'd also rollback *any* changes to the database + file_transactions.rollback() + self.log.critical("Error when registering", exc_info=True) + six.reraise(*sys.exc_info()) + + # Finalizing can't rollback safely so no use for moving it to + # the try, except. + file_transactions.finalize() + + def register(self, instance, file_transactions): + + instance_stagingdir = instance.data.get("stagingDir") + if not instance_stagingdir: + self.log.info(( + "{0} is missing reference to staging directory." + " Will try to get it from representation." + ).format(instance)) + + else: + self.log.debug( + "Establishing staging directory " + "@ {0}".format(instance_stagingdir) + ) + + # Ensure at least one representation is set up for registering. 
+ repres = instance.data.get("representations") + assert repres, "Instance has no representations data" + assert isinstance(repres, (list, tuple)), ( + "Instance 'representations' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) + ) + + template_name = self.get_template_name(instance) + + subset, subset_writes = self.prepare_subset(instance) + version, version_writes = self.prepare_version(instance, subset) + instance.data["versionEntity"] = version + + # Get existing representations (if any) + existing_repres_by_name = { + repres["name"].lower(): repres for repres in legacy_io.find( + { + "parent": version["_id"], + "type": "representation" + }, + # Only care about id and name of existing representations + projection={"_id": True, "name": True} + ) + } + + # Prepare all representations + prepared_representations = [] + for repre in instance.data["representations"]: + + if "delete" in repre.get("tags", []): + self.log.debug("Skipping representation marked for deletion: " + "{}".format(repre)) + continue + + # todo: reduce/simplify what is returned from this function + prepared = self.prepare_representation(repre, + template_name, + existing_repres_by_name, + version, + instance_stagingdir, + instance) + + for src, dst in prepared["transfers"]: + # todo: add support for hardlink transfers + file_transactions.add(src, dst) + + prepared_representations.append(prepared) + + if not prepared_representations: + # Even though we check `instance.data["representations"]` earlier + # this could still happen if all representations were tagged with + # "delete" and thus are skipped for integration + raise RuntimeError("No representations prepared to publish.") + + # Each instance can also have pre-defined transfers not explicitly + # part of a representation - like texture resources used by a + # .ma representation. Those destination paths are pre-defined, etc. + # todo: should we move or simplify this logic? + resource_destinations = set() + for src, dst in instance.data.get("transfers", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) + resource_destinations.add(os.path.abspath(dst)) + for src, dst in instance.data.get("hardlinks", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) + resource_destinations.add(os.path.abspath(dst)) + + # Bulk write to the database + # We write the subset and version to the database before the File + # Transaction to reduce the chances of another publish trying to + # publish to the same version number since that chance can greatly + # increase if the file transaction takes a long time. 
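# Rough sketch (for orientation only) of the pending operations assembled
# above by prepare_subset() and prepare_version():
#
#     subset_writes  -> [InsertOne(subset)] for a new subset, or
#                       [UpdateOne({...}, {"$set": {"data": ...}})] if it exists
#     version_writes -> [ReplaceOne({"_id": version["_id"]}, version,
#                                   upsert=True)]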
+ bulk_write(subset_writes + version_writes) + self.log.info("Subset {subset[name]} and Version {version[name]} " + "written to database..".format(subset=subset, + version=version)) + + # Process all file transfers of all integrations now + self.log.debug("Integrating source files to destination ...") + file_transactions.process() + self.log.debug("Backed up existing files: " + "{}".format(file_transactions.backups)) + self.log.debug("Transferred files: " + "{}".format(file_transactions.transferred)) + self.log.debug("Retrieving Representation Site Sync information ...") + + # Get the accessible sites for Site Sync + manager = ModulesManager() + sync_server_module = manager.modules_by_name["sync_server"] + sites = sync_server_module.compute_resource_sync_sites( + project_name=instance.data["projectEntity"]["name"] + ) + self.log.debug("Sync Server Sites: {}".format(sites)) + + # Compute the resource file infos once (files belonging to the + # version instance instead of an individual representation) so + # we can re-use those file infos per representation + anatomy = instance.context.data["anatomy"] + resource_file_infos = self.get_files_info(resource_destinations, + sites=sites, + anatomy=anatomy) + + # Finalize the representations now the published files are integrated + # Get 'files' info for representations and its attached resources + representation_writes = [] + new_repre_names_low = set() + for prepared in prepared_representations: + representation = prepared["representation"] + transfers = prepared["transfers"] + destinations = [dst for src, dst in transfers] + representation["files"] = self.get_files_info( + destinations, sites=sites, anatomy=anatomy + ) + + # Add the version resource file infos to each representation + representation["files"] += resource_file_infos + + # Set up representation for writing to the database. Since + # we *might* be overwriting an existing entry if the version + # already existed we'll use ReplaceOnce with `upsert=True` + representation_writes.append(ReplaceOne( + filter={"_id": representation["_id"]}, + replacement=representation, + upsert=True + )) + + new_repre_names_low.add(representation["name"].lower()) + + # Delete any existing representations that didn't get any new data + # if the instance is not set to append mode + if not instance.data.get("append", False): + delete_names = set() + for name, existing_repres in existing_repres_by_name.items(): + if name not in new_repre_names_low: + # We add the exact representation name because `name` is + # lowercase for name matching only and not in the database + delete_names.add(existing_repres["name"]) + if delete_names: + representation_writes.append(DeleteMany( + filter={ + "parent": version["_id"], + "name": {"$in": list(delete_names)} + } + )) + + # Write representations to the database + bulk_write(representation_writes) + + # Backwards compatibility + # todo: can we avoid the need to store this? 
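# Each value stored below is the dict returned by prepare_representation(),
# roughly of this shape (sketch only):
#
#     {
#         "representation": <representation doc written to the database>,
#         "anatomy_data": <template data used to resolve the publish path>,
#         "transfers": [(src, dst), ...],
#         "published_files": [dst, ...],
#     }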
+ instance.data["published_representations"] = { + p["representation"]["_id"]: p for p in prepared_representations + } + + self.log.info("Registered {} representations" + "".format(len(prepared_representations))) + + def prepare_subset(self, instance): + asset = instance.data.get("assetEntity") + subset_name = instance.data["subset"] + self.log.debug("Subset: {}".format(subset_name)) + + # Get existing subset if it exists + subset = legacy_io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": subset_name + }) + + # Define subset data + data = { + "families": get_instance_families(instance) + } + + subset_group = instance.data.get("subsetGroup") + if subset_group: + data["subsetGroup"] = subset_group + + bulk_writes = [] + if subset is None: + # Create a new subset + self.log.info("Subset '%s' not found, creating ..." % subset_name) + subset = { + "_id": ObjectId(), + "schema": "openpype:subset-3.0", + "type": "subset", + "name": subset_name, + "data": data, + "parent": asset["_id"] + } + bulk_writes.append(InsertOne(subset)) + + else: + # Update existing subset data with new data and set in database. + # We also change the found subset in-place so we don't need to + # re-query the subset afterwards + subset["data"].update(data) + bulk_writes.append(UpdateOne( + {"type": "subset", "_id": subset["_id"]}, + {"$set": { + "data": subset["data"] + }} + )) + + self.log.info("Prepared subset: {}".format(subset_name)) + return subset, bulk_writes + + def prepare_version(self, instance, subset): + + version_number = instance.data["version"] + + version = { + "schema": "openpype:version-3.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "data": self.create_version_data(instance) + } + + existing_version = legacy_io.find_one({ + 'type': 'version', + 'parent': subset["_id"], + 'name': version_number + }, projection={"_id": True}) + + if existing_version: + self.log.debug("Updating existing version ...") + version["_id"] = existing_version["_id"] + else: + self.log.debug("Creating new version ...") + version["_id"] = ObjectId() + + bulk_writes = [ReplaceOne( + filter={"_id": version["_id"]}, + replacement=version, + upsert=True + )] + + self.log.info("Prepared version: v{0:03d}".format(version["name"])) + + return version, bulk_writes + + def prepare_representation(self, repre, + template_name, + existing_repres_by_name, + version, + instance_stagingdir, + instance): + + # pre-flight validations + if repre["ext"].startswith("."): + raise ValueError("Extension must not start with a dot '.': " + "{}".format(repre["ext"])) + + if repre.get("transfers"): + raise ValueError("Representation is not allowed to have transfers" + "data before integration. 
They are computed in " + "the integrator" + "Got: {}".format(repre["transfers"])) + + # create template data for Anatomy + template_data = copy.deepcopy(instance.data["anatomyData"]) + + # required representation keys + files = repre['files'] + template_data["representation"] = repre["name"] + template_data["ext"] = repre["ext"] + + # optionals + # retrieve additional anatomy data from representation if exists + for key, anatomy_key in { + # Representation Key: Anatomy data key + "resolutionWidth": "resolution_width", + "resolutionHeight": "resolution_height", + "fps": "fps", + "outputName": "output", + "originalBasename": "originalBasename" + }.items(): + # Allow to take value from representation + # if not found also consider instance.data + if key in repre: + value = repre[key] + elif key in instance.data: + value = instance.data[key] + else: + continue + template_data[anatomy_key] = value + + if repre.get('stagingDir'): + stagingdir = repre['stagingDir'] + else: + # Fall back to instance staging dir if not explicitly + # set for representation in the instance + self.log.debug("Representation uses instance staging dir: " + "{}".format(instance_stagingdir)) + stagingdir = instance_stagingdir + if not stagingdir: + raise ValueError("No staging directory set for representation: " + "{}".format(repre)) + + self.log.debug("Anatomy template name: {}".format(template_name)) + anatomy = instance.context.data['anatomy'] + template = os.path.normpath(anatomy.templates[template_name]["path"]) + + is_udim = bool(repre.get("udim")) + is_sequence_representation = isinstance(files, (list, tuple)) + if is_sequence_representation: + # Collection of files (sequence) + assert not any(os.path.isabs(fname) for fname in files), ( + "Given file names contain full paths" + ) + + src_collection = assemble(files) + + # If the representation has `frameStart` set it renumbers the + # frame indices of the published collection. It will start from + # that `frameStart` index instead. Thus if that frame start + # differs from the collection we want to shift the destination + # frame indices from the source collection. + destination_indexes = list(src_collection.indexes) + destination_padding = len(get_first_frame_padded(src_collection)) + if repre.get("frameStart") is not None and not is_udim: + index_frame_start = int(repre.get("frameStart")) + + render_template = anatomy.templates[template_name] + # todo: should we ALWAYS manage the frame padding even when not + # having `frameStart` set? + frame_start_padding = int( + render_template.get( + "frame_padding", + render_template.get("padding") + ) + ) + + # Shift destination sequence to the start frame + src_start_frame = next(iter(src_collection.indexes)) + shift = index_frame_start - src_start_frame + if shift: + destination_indexes = [ + frame + shift for frame in destination_indexes + ] + destination_padding = frame_start_padding + + # To construct the destination template with anatomy we require + # a Frame or UDIM tile set for the template data. We use the first + # index of the destination for that because that could've shifted + # from the source indexes, etc. 
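# Worked example with hypothetical numbers: a source sequence
# render.1001.exr - render.1005.exr published with repre["frameStart"] = 1
# and a template frame padding of 4 gets destination indexes 1-5, so the
# first destination frame becomes:
#
#     get_frame_padded(frame=1, padding=4)  # -> "0001"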
+ first_index_padded = get_frame_padded(frame=destination_indexes[0], + padding=destination_padding) + if is_udim: + # UDIM representations handle ranges in a different manner + template_data["udim"] = first_index_padded + else: + template_data["frame"] = first_index_padded + + # Construct destination collection from template + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + self.log.debug("Template filled: {}".format(str(template_filled))) + dst_collection = assemble([os.path.normpath(template_filled)]) + + # Update the destination indexes and padding + dst_collection.indexes.clear() + dst_collection.indexes.update(set(destination_indexes)) + dst_collection.padding = destination_padding + assert ( + len(src_collection.indexes) == len(dst_collection.indexes) + ), "This is a bug" + + # Multiple file transfers + transfers = [] + for src_file_name, dst in zip(src_collection, dst_collection): + src = os.path.join(stagingdir, src_file_name) + transfers.append((src, dst)) + + else: + # Single file + fname = files + assert not os.path.isabs(fname), ( + "Given file name is a full path" + ) + + # Manage anatomy template data + template_data.pop("frame", None) + if is_udim: + template_data["udim"] = repre["udim"][0] + + # Construct destination filepath from template + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + dst = os.path.normpath(template_filled) + + # Single file transfer + src = os.path.join(stagingdir, fname) + transfers = [(src, dst)] + + # todo: Are we sure the assumption each representation + # ends up in the same folder is valid? + if not instance.data.get("publishDir"): + instance.data["publishDir"] = ( + anatomy_filled + [template_name] + ["folder"] + ) + + for key in self.db_representation_context_keys: + # Also add these values to the context even if not used by the + # destination template + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] + + # Explicitly store the full list even though template data might + # have a different value because it uses just a single udim tile + if repre.get("udim"): + repre_context["udim"] = repre.get("udim") # store list + + # Use previous representation's id if there is a name match + existing = existing_repres_by_name.get(repre["name"].lower()) + if existing: + repre_id = existing["_id"] + else: + repre_id = ObjectId() + + # Backwards compatibility: + # Store first transferred destination as published path data + # todo: can we remove this? + # todo: We shouldn't change data that makes its way back into + # instance.data[] until we know the publish actually succeeded + # otherwise `published_path` might not actually be valid? + published_path = transfers[0][1] + repre["published_path"] = published_path # Backwards compatibility + + # todo: `repre` is not the actual `representation` entity + # we should simplify/clarify difference between data above + # and the actual representation entity for the database + data = repre.get("data", {}) + data.update({'path': published_path, 'template': template}) + representation = { + "_id": repre_id, + "schema": "openpype:representation-2.0", + "type": "representation", + "parent": version["_id"], + "name": repre['name'], + "data": data, + + # Imprint shortcut to context for performance reasons. 
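# At this point repre_context holds the template keys used to resolve the
# path plus, when present in the template data, the keys from
# db_representation_context_keys (project, asset, task, subset, version,
# representation, family, hierarchy, username) added above.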
+ "context": repre_context + } + + # todo: simplify/streamline which additional data makes its way into + # the representation context + if repre.get("outputName"): + representation["context"]["output"] = repre['outputName'] + + if is_sequence_representation and repre.get("frameStart") is not None: + representation['context']['frame'] = template_data["frame"] + + return { + "representation": representation, + "anatomy_data": template_data, + "transfers": transfers, + # todo: avoid the need for 'published_files' used by Integrate Hero + # backwards compatibility + "published_files": [transfer[1] for transfer in transfers] + } + + def create_version_data(self, instance): + """Create the data dictionary for the version + + Args: + instance: the current instance being published + + Returns: + dict: the required information for version["data"] + """ + + context = instance.context + + # create relative source path for DB + if "source" in instance.data: + source = instance.data["source"] + else: + source = context.data["currentFile"] + anatomy = instance.context.data["anatomy"] + source = self.get_rootless_path(anatomy, source) + self.log.debug("Source: {}".format(source)) + + version_data = { + "families": get_instance_families(instance), + "time": context.data["time"], + "author": context.data["user"], + "source": source, + "comment": context.data.get("comment"), + "machine": context.data.get("machine"), + "fps": instance.data.get("fps", context.data.get("fps")) + } + + # todo: preferably we wouldn't need this "if dict" etc. logic and + # instead be able to rely what the input value is if it's set. + intent_value = context.data.get("intent") + if intent_value and isinstance(intent_value, dict): + intent_value = intent_value.get("value") + + if intent_value: + version_data["intent"] = intent_value + + # Include optional data if present in + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart", "sourceHashes" + ] + for key in optionals: + if key in instance.data: + version_data[key] = instance.data[key] + + # Include instance.data[versionData] directly + version_data_instance = instance.data.get('versionData') + if version_data_instance: + version_data.update(version_data_instance) + + return version_data + + def get_template_name(self, instance): + """Return anatomy template name to use for integration""" + # Define publish template name from profiles + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles(self.template_name_profiles, + filter_criteria, + logger=self.log) + if profile: + return profile["template_name"] + else: + return self.default_template_name + + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + # Anatomy data is pre-filled by Collectors + anatomy_data = instance.data["anatomyData"] + + # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + # Return filter criteria + return { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "hosts": anatomy_data["app"], + "task_types": task.get("type") + } + + def get_rootless_path(self, anatomy, path): + """Returns, if possible, path without absolute portion from root + (eg. 'c:\' or '/opt/..') + + This information is platform dependent and shouldn't be captured. + Example: + 'c:/projects/MyProject1/Assets/publish...' > + '{root}/MyProject1/Assets...' 
+ + Args: + anatomy: anatomy part from instance + path: path (absolute) + Returns: + path: modified path if possible, or unmodified path + + warning logged + """ + success, rootless_path = anatomy.find_root_template_from_path(path) + if success: + path = rootless_path + else: + self.log.warning(( + "Could not find root path for remapping \"{}\"." + " This may cause issues on farm." + ).format(path)) + return path + + def get_files_info(self, destinations, sites, anatomy): + """Prepare 'files' info portion for representations. + + Arguments: + destinations (list): List of transferred file destinations + sites (list): array of published locations + anatomy: anatomy part from instance + Returns: + output_resources: array of dictionaries to be added to 'files' key + in representation + """ + file_infos = [] + for file_path in destinations: + file_info = self.prepare_file_info(file_path, anatomy, sites=sites) + file_infos.append(file_info) + return file_infos + + def prepare_file_info(self, path, anatomy, sites): + """ Prepare information for one file (asset or resource) + + Arguments: + path: destination url of published file + anatomy: anatomy part from instance + sites: array of published locations, + [ {'name':'studio', 'created_dt':date} by default + keys expected ['studio', 'site1', 'gdrive1'] + + Returns: + dict: file info dictionary + """ + return { + "_id": ObjectId(), + "path": self.get_rootless_path(anatomy, path), + "size": os.path.getsize(path), + "hash": openpype.api.source_hash(path), + "sites": sites + } From fd2d07e94c0fb34730547c396e09ddc314b56983 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:22:29 +0200 Subject: [PATCH 201/785] Revert integrator to latest develop --- openpype/plugins/publish/integrate_new.py | 1710 +++++++++++++-------- 1 file changed, 1088 insertions(+), 622 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index a07e8a1e0f..4c14c17dae 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -1,111 +1,63 @@ import os +from os.path import getsize import logging import sys import copy import clique +import errno import six +import re +import shutil +from collections import deque, defaultdict +from datetime import datetime from bson.objectid import ObjectId -from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne +from pymongo import DeleteOne, InsertOne import pyblish.api import openpype.api -from openpype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles -from openpype.lib.file_transaction import FileTransaction +from openpype.lib import ( + prepare_template_data, + create_hard_link, + StringTemplate, + TemplateUnsolved +) from openpype.pipeline import legacy_io +# this is needed until speedcopy for linux is fixed +if sys.platform == "win32": + from speedcopy import copyfile +else: + from shutil import copyfile + log = logging.getLogger(__name__) -def assemble(files): - """Convenience `clique.assemble` wrapper for files of a single collection. - - Unlike `clique.assemble` this wrapper does not allow more than a single - Collection nor any remainder files. Errors will be raised when not only - a single collection is assembled. - - Returns: - clique.Collection: A single sequence Collection - - Raises: - ValueError: Error is raised when files do not result in a single - collected Collection. - - """ - # todo: move this to lib? - # Get the sequence as a collection. 
The files must be of a single - # sequence and have no remainder outside of the collections. - patterns = [clique.PATTERNS["frames"]] - collections, remainder = clique.assemble(files, - minimum_items=1, - patterns=patterns) - if not collections: - raise ValueError("No collections found in files: " - "{}".format(files)) - if remainder: - raise ValueError("Files found not detected as part" - " of a sequence: {}".format(remainder)) - if len(collections) > 1: - raise ValueError("Files in sequence are not part of a" - " single sequence collection: " - "{}".format(collections)) - return collections[0] - - -def get_instance_families(instance): - """Get all families of the instance""" - # todo: move this to lib? - family = instance.data.get("family") - families = [] - if family: - families.append(family) - - for _family in (instance.data.get("families") or []): - if _family not in families: - families.append(_family) - - return families - - -def get_frame_padded(frame, padding): - """Return frame number as string with `padding` amount of padded zeros""" - return "{frame:0{padding}d}".format(padding=padding, frame=frame) - - -def get_first_frame_padded(collection): - """Return first frame as padded number from `clique.Collection`""" - start_frame = next(iter(collection.indexes)) - return get_frame_padded(start_frame, padding=collection.padding) - - -def bulk_write(writes): - """Convenience function to bulk write into active project database""" - project = legacy_io.Session["AVALON_PROJECT"] - return legacy_io._database[project].bulk_write(writes) - - class IntegrateAssetNew(pyblish.api.InstancePlugin): - """Register publish in the database and transfer files to destinations. + """Resolve any dependency issues - Steps: - 1) Register the subset and version - 2) Transfer the representation files to the destination - 3) Register the representation + This plug-in resolves any paths which, if not updated might break + the published file. - Requires: - instance.data['representations'] - must be a list and each member - must be a dictionary with following data: - 'files': list of filenames for sequence, string for single file. - Only the filename is allowed, without the folder path. - 'stagingDir': "path/to/folder/with/files" - 'name': representation name (usually the same as extension) - 'ext': file extension - optional data - "frameStart" - "frameEnd" - 'fps' - "data": additional metadata for each representation. + The order of families is important, when working with lookdev you want to + first publish the texture, update the texture paths in the nodes and then + publish the shading network. Same goes for file dependent assets. + + Requirements for instance to be correctly integrated + + instance.data['representations'] - must be a list and each member + must be a dictionary with following data: + 'files': list of filenames for sequence, string for single file. + Only the filename is allowed, without the folder path. + 'stagingDir': "path/to/folder/with/files" + 'name': representation name (usually the same as extension) + 'ext': file extension + optional data + "frameStart" + "frameEnd" + 'fps' + "data": additional metadata for each representation. 
""" label = "Integrate Asset New" @@ -140,6 +92,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source", "matchmove", "image", + "source", "assembly", "fbx", "textures", @@ -156,51 +109,157 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "usd", "staticMesh", "skeletalMesh", - "usdComposition", - "usdOverride", + "mvLook", + "mvUsd", + "mvUsdComposition", + "mvUsdOverride", "simpleUnrealTexture" ] - exclude_families = ["clip", "render.farm"] - default_template_name = "publish" - - # Representation context keys that should always be written to - # the database even if not used by the destination template + exclude_families = ["render.farm"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "username" + "family", "hierarchy", "task", "username" ] + default_template_name = "publish" + + # suffix to denote temporary files, use without '.' + TMP_FILE_EXT = 'tmp' + + # file_url : file_size of all published and uploaded files + integrated_file_sizes = {} # Attributes set by settings template_name_profiles = None + subset_grouping_profiles = None def process(self, instance): + for ef in self.exclude_families: + if ( + instance.data["family"] == ef or + ef in instance.data["families"]): + self.log.debug("Excluded family '{}' in '{}' or {}".format( + ef, instance.data["family"], instance.data["families"])) + return - # Exclude instances that also contain families from exclude families - families = set(get_instance_families(instance)) - exclude = families & set(self.exclude_families) - if exclude: - self.log.debug("Instance not integrated due to exclude " - "families found: {}".format(", ".join(exclude))) + # instance should be published on a farm + if instance.data.get("farm"): return - file_transactions = FileTransaction(log=self.log) + # Prepare repsentations that should be integrated + repres = instance.data.get("representations") + # Raise error if instance don't have any representations + if not repres: + raise ValueError( + "Instance {} has no files to transfer".format( + instance.data["family"] + ) + ) + + # Validate type of stored representations + if not isinstance(repres, (list, tuple)): + raise TypeError( + "Instance 'files' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) + ) + + # Filter representations + filtered_repres = [] + for repre in repres: + if "delete" in repre.get("tags", []): + continue + filtered_repres.append(repre) + + # Skip instance if there are not representations to integrate + # all representations should not be integrated + if not filtered_repres: + self.log.warning(( + "Skipping, there are no representations" + " to integrate for instance {}" + ).format(instance.data["family"])) + return + + self.integrated_file_sizes = {} try: - self.register(instance, file_transactions) + self.register(instance, filtered_repres) + self.log.info("Integrated Asset in to the database ...") + self.log.info("instance.data: {}".format(instance.data)) + self.handle_destination_files(self.integrated_file_sizes, + 'finalize') except Exception: # clean destination - # todo: preferably we'd also rollback *any* changes to the database - file_transactions.rollback() self.log.critical("Error when registering", exc_info=True) + self.handle_destination_files(self.integrated_file_sizes, 'remove') six.reraise(*sys.exc_info()) - # Finalizing can't rollback safely so no use for moving it to - # the try, except. 
- file_transactions.finalize() + def register(self, instance, repres): + # Required environment variables + anatomy_data = instance.data["anatomyData"] - def register(self, instance, file_transactions): + legacy_io.install() - instance_stagingdir = instance.data.get("stagingDir") - if not instance_stagingdir: + context = instance.context + + project_entity = instance.data["projectEntity"] + + context_asset_name = None + context_asset_doc = context.data.get("assetEntity") + if context_asset_doc: + context_asset_name = context_asset_doc["name"] + + asset_name = instance.data["asset"] + asset_entity = instance.data.get("assetEntity") + if not asset_entity or asset_entity["name"] != context_asset_name: + asset_entity = legacy_io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + assert asset_entity, ( + "No asset found by the name \"{0}\" in project \"{1}\"" + ).format(asset_name, project_entity["name"]) + + instance.data["assetEntity"] = asset_entity + + # update anatomy data with asset specific keys + # - name should already been set + hierarchy = "" + parents = asset_entity["data"]["parents"] + if parents: + hierarchy = "/".join(parents) + anatomy_data["hierarchy"] = hierarchy + + # Make sure task name in anatomy data is same as on instance.data + asset_tasks = ( + asset_entity.get("data", {}).get("tasks") + ) or {} + task_name = instance.data.get("task") + if task_name: + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + project_task_types = project_entity["config"]["tasks"] + task_code = project_task_types.get(task_type, {}).get("short_name") + anatomy_data["task"] = { + "name": task_name, + "type": task_type, + "short": task_code + } + + elif "task" in anatomy_data: + # Just set 'task_name' variable to context task + task_name = anatomy_data["task"]["name"] + task_type = anatomy_data["task"]["type"] + + else: + task_name = None + task_type = None + + # Fill family in anatomy data + anatomy_data["family"] = instance.data.get("family") + + stagingdir = instance.data.get("stagingDir") + if not stagingdir: self.log.info(( "{0} is missing reference to staging directory." " Will try to get it from representation." @@ -208,515 +267,718 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): else: self.log.debug( - "Establishing staging directory " - "@ {0}".format(instance_stagingdir) + "Establishing staging directory @ {0}".format(stagingdir) ) - # Ensure at least one representation is set up for registering. - repres = instance.data.get("representations") - assert repres, "Instance has no representations data" - assert isinstance(repres, (list, tuple)), ( - "Instance 'representations' must be a list, got: {0} {1}".format( - str(type(repres)), str(repres) - ) + subset = self.get_subset(asset_entity, instance) + instance.data["subsetEntity"] = subset + + version_number = instance.data["version"] + self.log.debug("Next version: v{}".format(version_number)) + + version_data = self.create_version_data(context, instance) + + version_data_instance = instance.data.get('versionData') + if version_data_instance: + version_data.update(version_data_instance) + + # TODO rename method from `create_version` to + # `prepare_version` or similar... 
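
For orientation, `create_version` called on the next line (its body is restored further down in this diff) only assembles the version document as a plain dict, roughly of the shape below; the sketch reuses the surrounding variables and is an illustration, not part of the patch:

    # Illustration only: approximate shape of the dict returned by create_version.
    version = {
        "schema": "openpype:version-3.0",
        "type": "version",
        "parent": subset["_id"],     # ObjectId of the subset document
        "name": version_number,      # integer version number, e.g. 12
        "data": version_data         # families, time, author, source, comment, ...
    }
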
+ version = self.create_version( + subset=subset, + version_number=version_number, + data=version_data ) - template_name = self.get_template_name(instance) + self.log.debug("Creating version ...") - subset, subset_writes = self.prepare_subset(instance) - version, version_writes = self.prepare_version(instance, subset) + new_repre_names_low = [ + _repre["name"].lower() + for _repre in repres + ] + + existing_version = legacy_io.find_one({ + 'type': 'version', + 'parent': subset["_id"], + 'name': version_number + }) + + if existing_version is None: + version_id = legacy_io.insert_one(version).inserted_id + else: + # Check if instance have set `append` mode which cause that + # only replicated representations are set to archive + append_repres = instance.data.get("append", False) + + # Update version data + # TODO query by _id and + legacy_io.update_many({ + 'type': 'version', + 'parent': subset["_id"], + 'name': version_number + }, { + '$set': version + }) + version_id = existing_version['_id'] + + # Find representations of existing version and archive them + current_repres = list(legacy_io.find({ + "type": "representation", + "parent": version_id + })) + bulk_writes = [] + for repre in current_repres: + if append_repres: + # archive only duplicated representations + if repre["name"].lower() not in new_repre_names_low: + continue + # Representation must change type, + # `_id` must be stored to other key and replaced with new + # - that is because new representations should have same ID + repre_id = repre["_id"] + bulk_writes.append(DeleteOne({"_id": repre_id})) + + repre["orig_id"] = repre_id + repre["_id"] = ObjectId() + repre["type"] = "archived_representation" + bulk_writes.append(InsertOne(repre)) + + # bulk updates + if bulk_writes: + project_name = legacy_io.Session["AVALON_PROJECT"] + legacy_io.database[project_name].bulk_write( + bulk_writes + ) + + version = legacy_io.find_one({"_id": version_id}) instance.data["versionEntity"] = version - # Get existing representations (if any) - existing_repres_by_name = { - repres["name"].lower(): repres for repres in legacy_io.find( - { - "parent": version["_id"], - "type": "representation" - }, - # Only care about id and name of existing representations - projection={"_id": True, "name": True} - ) + existing_repres = list(legacy_io.find({ + "parent": version_id, + "type": "archived_representation" + })) + + instance.data['version'] = version['name'] + + intent_value = instance.context.data.get("intent") + if intent_value and isinstance(intent_value, dict): + intent_value = intent_value.get("value") + + if intent_value: + anatomy_data["intent"] = intent_value + + anatomy = instance.context.data['anatomy'] + + # Find the representations to transfer amongst the files + # Each should be a single representation (as such, a single extension) + representations = [] + destination_list = [] + + orig_transfers = [] + if 'transfers' not in instance.data: + instance.data['transfers'] = [] + else: + orig_transfers = list(instance.data['transfers']) + + family = self.main_family_from_instance(instance) + + key_values = { + "families": family, + "tasks": task_name, + "hosts": instance.context.data["hostName"], + "task_types": task_type } - - # Prepare all representations - prepared_representations = [] - for repre in instance.data["representations"]: - - if "delete" in repre.get("tags", []): - self.log.debug("Skipping representation marked for deletion: " - "{}".format(repre)) - continue - - # todo: reduce/simplify what is returned from this function - 
prepared = self.prepare_representation(repre, - template_name, - existing_repres_by_name, - version, - instance_stagingdir, - instance) - - for src, dst in prepared["transfers"]: - # todo: add support for hardlink transfers - file_transactions.add(src, dst) - - prepared_representations.append(prepared) - - if not prepared_representations: - # Even though we check `instance.data["representations"]` earlier - # this could still happen if all representations were tagged with - # "delete" and thus are skipped for integration - raise RuntimeError("No representations prepared to publish.") - - # Each instance can also have pre-defined transfers not explicitly - # part of a representation - like texture resources used by a - # .ma representation. Those destination paths are pre-defined, etc. - # todo: should we move or simplify this logic? - resource_destinations = set() - for src, dst in instance.data.get("transfers", []): - file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) - resource_destinations.add(os.path.abspath(dst)) - for src, dst in instance.data.get("hardlinks", []): - file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) - resource_destinations.add(os.path.abspath(dst)) - - # Bulk write to the database - # We write the subset and version to the database before the File - # Transaction to reduce the chances of another publish trying to - # publish to the same version number since that chance can greatly - # increase if the file transaction takes a long time. - bulk_write(subset_writes + version_writes) - self.log.info("Subset {subset[name]} and Version {version[name]} " - "written to database..".format(subset=subset, - version=version)) - - # Process all file transfers of all integrations now - self.log.debug("Integrating source files to destination ...") - file_transactions.process() - self.log.debug("Backed up existing files: " - "{}".format(file_transactions.backups)) - self.log.debug("Transferred files: " - "{}".format(file_transactions.transferred)) - self.log.debug("Retrieving Representation Site Sync information ...") - - # Get the accessible sites for Site Sync - manager = ModulesManager() - sync_server_module = manager.modules_by_name["sync_server"] - sites = sync_server_module.compute_resource_sync_sites( - project_name=instance.data["projectEntity"]["name"] + profile = filter_profiles( + self.template_name_profiles, + key_values, + logger=self.log ) - self.log.debug("Sync Server Sites: {}".format(sites)) - # Compute the resource file infos once (files belonging to the - # version instance instead of an individual representation) so - # we can re-use those file infos per representation - anatomy = instance.context.data["anatomy"] - resource_file_infos = self.get_files_info(resource_destinations, - sites=sites, - anatomy=anatomy) + template_name = "publish" + if profile: + template_name = profile["template_name"] - # Finalize the representations now the published files are integrated - # Get 'files' info for representations and its attached resources - representation_writes = [] - new_repre_names_low = set() - for prepared in prepared_representations: - representation = prepared["representation"] - transfers = prepared["transfers"] - destinations = [dst for src, dst in transfers] + published_representations = {} + for idx, repre in enumerate(repres): + published_files = [] + + # create template data for Anatomy + template_data = copy.deepcopy(anatomy_data) + if intent_value is not None: + template_data["intent"] = intent_value + + resolution_width = 
repre.get("resolutionWidth") + resolution_height = repre.get("resolutionHeight") + fps = instance.data.get("fps") + + if resolution_width: + template_data["resolution_width"] = resolution_width + if resolution_width: + template_data["resolution_height"] = resolution_height + if resolution_width: + template_data["fps"] = fps + + if "originalBasename" in instance.data: + template_data.update({ + "originalBasename": instance.data.get("originalBasename") + }) + + files = repre['files'] + if repre.get('stagingDir'): + stagingdir = repre['stagingDir'] + + if repre.get("outputName"): + template_data["output"] = repre['outputName'] + + template_data["representation"] = repre["name"] + + ext = repre["ext"] + if ext.startswith("."): + self.log.warning(( + "Implementaion warning: <\"{}\">" + " Representation's extension stored under \"ext\" key " + " started with dot (\"{}\")." + ).format(repre["name"], ext)) + ext = ext[1:] + repre["ext"] = ext + template_data["ext"] = ext + + self.log.info(template_name) + template = os.path.normpath( + anatomy.templates[template_name]["path"]) + + sequence_repre = isinstance(files, list) + repre_context = None + if sequence_repre: + self.log.debug( + "files: {}".format(files)) + src_collections, remainder = clique.assemble(files) + self.log.debug( + "src_tail_collections: {}".format(str(src_collections))) + src_collection = src_collections[0] + + # Assert that each member has identical suffix + src_head = src_collection.format("{head}") + src_tail = src_collection.format("{tail}") + + # fix dst_padding + valid_files = [x for x in files if src_collection.match(x)] + padd_len = len( + valid_files[0].replace(src_head, "").replace(src_tail, "") + ) + src_padding_exp = "%0{}d".format(padd_len) + + test_dest_files = list() + for i in [1, 2]: + template_data["representation"] = repre['ext'] + if not repre.get("udim"): + template_data["frame"] = src_padding_exp % i + else: + template_data["udim"] = src_padding_exp % i + + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + if repre_context is None: + repre_context = template_filled.used_values + test_dest_files.append( + os.path.normpath(template_filled) + ) + if not repre.get("udim"): + template_data["frame"] = repre_context["frame"] + else: + template_data["udim"] = repre_context["udim"] + + self.log.debug( + "test_dest_files: {}".format(str(test_dest_files))) + + dst_collections, remainder = clique.assemble(test_dest_files) + dst_collection = dst_collections[0] + dst_head = dst_collection.format("{head}") + dst_tail = dst_collection.format("{tail}") + + index_frame_start = None + + # TODO use frame padding from right template group + if repre.get("frameStart") is not None: + frame_start_padding = int( + anatomy.templates["render"].get( + "frame_padding", + anatomy.templates["render"].get("padding") + ) + ) + + index_frame_start = int(repre.get("frameStart")) + + # exception for slate workflow + if index_frame_start and "slate" in instance.data["families"]: + index_frame_start -= 1 + + dst_padding_exp = src_padding_exp + dst_start_frame = None + collection_start = list(src_collection.indexes)[0] + for i in src_collection.indexes: + # TODO 1.) do not count padding in each index iteration + # 2.) 
do not count dst_padding from src_padding before + # index_frame_start check + frame_number = i - collection_start + src_padding = src_padding_exp % i + + src_file_name = "{0}{1}{2}".format( + src_head, src_padding, src_tail) + + dst_padding = src_padding_exp % frame_number + + if index_frame_start is not None: + dst_padding_exp = "%0{}d".format(frame_start_padding) + dst_padding = dst_padding_exp % (index_frame_start + frame_number) # noqa: E501 + elif repre.get("udim"): + dst_padding = int(i) + + dst = "{0}{1}{2}".format( + dst_head, + dst_padding, + dst_tail + ) + + self.log.debug("destination: `{}`".format(dst)) + src = os.path.join(stagingdir, src_file_name) + + self.log.debug("source: {}".format(src)) + instance.data["transfers"].append([src, dst]) + + published_files.append(dst) + + # for adding first frame into db + if not dst_start_frame: + dst_start_frame = dst_padding + + # Store used frame value to template data + if repre.get("frame"): + template_data["frame"] = dst_start_frame + + dst = "{0}{1}{2}".format( + dst_head, + dst_start_frame, + dst_tail + ) + repre['published_path'] = dst + + else: + # Single file + # _______ + # | |\ + # | | + # | | + # | | + # |_______| + # + template_data.pop("frame", None) + fname = files + assert not os.path.isabs(fname), ( + "Given file name is a full path" + ) + + template_data["representation"] = repre['ext'] + # Store used frame value to template data + if repre.get("udim"): + template_data["udim"] = repre["udim"][0] + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + dst = os.path.normpath(template_filled) + + instance.data["transfers"].append([src, dst]) + + published_files.append(dst) + repre['published_path'] = dst + self.log.debug("__ dst: {}".format(dst)) + + if not instance.data.get("publishDir"): + instance.data["publishDir"] = ( + anatomy_filled + [template_name] + ["folder"] + ) + if repre.get("udim"): + repre_context["udim"] = repre.get("udim") # store list + + repre["publishedFiles"] = published_files + + for key in self.db_representation_context_keys: + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] + + # Use previous representation's id if there are any + repre_id = None + repre_name_low = repre["name"].lower() + for _repre in existing_repres: + # NOTE should we check lowered names? + if repre_name_low == _repre["name"]: + repre_id = _repre["orig_id"] + break + + # Create new id if existing representations does not match + if repre_id is None: + repre_id = ObjectId() + + data = repre.get("data") or {} + data.update({'path': dst, 'template': template}) + representation = { + "_id": repre_id, + "schema": "openpype:representation-2.0", + "type": "representation", + "parent": version_id, + "name": repre['name'], + "data": data, + "dependencies": instance.data.get("dependencies", "").split(), + + # Imprint shortcut to context + # for performance reasons. 
+ "context": repre_context + } + + if repre.get("outputName"): + representation["context"]["output"] = repre['outputName'] + + if sequence_repre and repre.get("frameStart") is not None: + representation['context']['frame'] = ( + dst_padding_exp % int(repre.get("frameStart")) + ) + + # any file that should be physically copied is expected in + # 'transfers' or 'hardlinks' + if instance.data.get('transfers', False) or \ + instance.data.get('hardlinks', False): + # could throw exception, will be caught in 'process' + # all integration to DB is being done together lower, + # so no rollback needed + self.log.debug("Integrating source files to destination ...") + self.integrated_file_sizes.update(self.integrate(instance)) + self.log.debug("Integrated files {}". + format(self.integrated_file_sizes)) + + # get 'files' info for representation and all attached resources + self.log.debug("Preparing files information ...") representation["files"] = self.get_files_info( - destinations, sites=sites, anatomy=anatomy - ) + instance, + self.integrated_file_sizes) - # Add the version resource file infos to each representation - representation["files"] += resource_file_infos + self.log.debug("__ representation: {}".format(representation)) + destination_list.append(dst) + self.log.debug("__ destination_list: {}".format(destination_list)) + instance.data['destination_list'] = destination_list + representations.append(representation) + published_representations[repre_id] = { + "representation": representation, + "anatomy_data": template_data, + "published_files": published_files + } + self.log.debug("__ representations: {}".format(representations)) + # reset transfers for next representation + # instance.data['transfers'] is used as a global variable + # in current codebase + instance.data['transfers'] = list(orig_transfers) - # Set up representation for writing to the database. Since - # we *might* be overwriting an existing entry if the version - # already existed we'll use ReplaceOnce with `upsert=True` - representation_writes.append(ReplaceOne( - filter={"_id": representation["_id"]}, - replacement=representation, - upsert=True - )) + # Remove old representations if there are any (before insertion of new) + if existing_repres: + repre_ids_to_remove = [] + for repre in existing_repres: + repre_ids_to_remove.append(repre["_id"]) + legacy_io.delete_many({"_id": {"$in": repre_ids_to_remove}}) - new_repre_names_low.add(representation["name"].lower()) + for rep in instance.data["representations"]: + self.log.debug("__ rep: {}".format(rep)) - # Delete any existing representations that didn't get any new data - # if the instance is not set to append mode - if not instance.data.get("append", False): - delete_names = set() - for name, existing_repres in existing_repres_by_name.items(): - if name not in new_repre_names_low: - # We add the exact representation name because `name` is - # lowercase for name matching only and not in the database - delete_names.add(existing_repres["name"]) - if delete_names: - representation_writes.append(DeleteMany( - filter={ - "parent": version["_id"], - "name": {"$in": list(delete_names)} - } - )) + legacy_io.insert_many(representations) + instance.data["published_representations"] = ( + published_representations + ) + # self.log.debug("Representation: {}".format(representations)) + self.log.info("Registered {} items".format(len(representations))) - # Write representations to the database - bulk_write(representation_writes) + def integrate(self, instance): + """ Move the files. 
- # Backwards compatibility - # todo: can we avoid the need to store this? - instance.data["published_representations"] = { - p["representation"]["_id"]: p for p in prepared_representations - } + Through `instance.data["transfers"]` - self.log.info("Registered {} representations" - "".format(len(prepared_representations))) + Args: + instance: the instance to integrate + Returns: + integrated_file_sizes: dictionary of destination file url and + its size in bytes + """ + # store destination url and size for reporting and rollback + integrated_file_sizes = {} + transfers = list(instance.data.get("transfers", list())) + for src, dest in transfers: + if os.path.normpath(src) != os.path.normpath(dest): + dest = self.get_dest_temp_url(dest) + self.copy_file(src, dest) + # TODO needs to be updated during site implementation + integrated_file_sizes[dest] = os.path.getsize(dest) - def prepare_subset(self, instance): - asset = instance.data.get("assetEntity") + # Produce hardlinked copies + # Note: hardlink can only be produced between two files on the same + # server/disk and editing one of the two will edit both files at once. + # As such it is recommended to only make hardlinks between static files + # to ensure publishes remain safe and non-edited. + hardlinks = instance.data.get("hardlinks", list()) + for src, dest in hardlinks: + dest = self.get_dest_temp_url(dest) + self.log.debug("Hardlinking file ... {} -> {}".format(src, dest)) + if not os.path.exists(dest): + self.hardlink_file(src, dest) + + # TODO needs to be updated during site implementation + integrated_file_sizes[dest] = os.path.getsize(dest) + + return integrated_file_sizes + + def copy_file(self, src, dst): + """ Copy given source to destination + + Arguments: + src (str): the source file which needs to be copied + dst (str): the destination of the sourc file + Returns: + None + """ + src = os.path.normpath(src) + dst = os.path.normpath(dst) + self.log.debug("Copying file ... 
{} -> {}".format(src, dst)) + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) + + # copy file with speedcopy and check if size of files are simetrical + while True: + if not shutil._samefile(src, dst): + copyfile(src, dst) + else: + self.log.critical( + "files are the same {} to {}".format(src, dst) + ) + os.remove(dst) + try: + shutil.copyfile(src, dst) + self.log.debug("Copying files with shutil...") + except OSError as e: + self.log.critical("Cannot copy {} to {}".format(src, dst)) + self.log.critical(e) + six.reraise(*sys.exc_info()) + if str(getsize(src)) in str(getsize(dst)): + break + + def hardlink_file(self, src, dst): + dirname = os.path.dirname(dst) + + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) + + create_hard_link(src, dst) + + def get_subset(self, asset, instance): subset_name = instance.data["subset"] - self.log.debug("Subset: {}".format(subset_name)) - - # Get existing subset if it exists subset = legacy_io.find_one({ "type": "subset", "parent": asset["_id"], "name": subset_name }) - # Define subset data - data = { - "families": get_instance_families(instance) - } - - subset_group = instance.data.get("subsetGroup") - if subset_group: - data["subsetGroup"] = subset_group - - bulk_writes = [] if subset is None: - # Create a new subset self.log.info("Subset '%s' not found, creating ..." % subset_name) - subset = { - "_id": ObjectId(), + self.log.debug("families. %s" % instance.data.get('families')) + self.log.debug( + "families. %s" % type(instance.data.get('families'))) + + family = instance.data.get("family") + families = [] + if family: + families.append(family) + + for _family in (instance.data.get("families") or []): + if _family not in families: + families.append(_family) + + _id = legacy_io.insert_one({ "schema": "openpype:subset-3.0", "type": "subset", "name": subset_name, - "data": data, + "data": { + "families": families + }, "parent": asset["_id"] - } - bulk_writes.append(InsertOne(subset)) + }).inserted_id - else: - # Update existing subset data with new data and set in database. - # We also change the found subset in-place so we don't need to - # re-query the subset afterwards - subset["data"].update(data) - bulk_writes.append(UpdateOne( - {"type": "subset", "_id": subset["_id"]}, - {"$set": { - "data": subset["data"] - }} - )) + subset = legacy_io.find_one({"_id": _id}) - self.log.info("Prepared subset: {}".format(subset_name)) - return subset, bulk_writes + # QUESTION Why is changing of group and updating it's + # families in 'get_subset'? + self._set_subset_group(instance, subset["_id"]) - def prepare_version(self, instance, subset): + # Update families on subset. + families = [instance.data["family"]] + families.extend(instance.data.get("families", [])) + legacy_io.update_many( + {"type": "subset", "_id": ObjectId(subset["_id"])}, + {"$set": {"data.families": families}} + ) - version_number = instance.data["version"] + return subset - version = { - "schema": "openpype:version-3.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "data": self.create_version_data(instance) + def _set_subset_group(self, instance, subset_id): + """ + Mark subset as belonging to group in DB. 
+ + Uses Settings > Global > Publish plugins > IntegrateAssetNew + + Args: + instance (dict): processed instance + subset_id (str): DB's subset _id + + """ + # Fist look into instance data + subset_group = instance.data.get("subsetGroup") + if not subset_group: + subset_group = self._get_subset_group(instance) + + if subset_group: + legacy_io.update_many({ + 'type': 'subset', + '_id': ObjectId(subset_id) + }, {'$set': {'data.subsetGroup': subset_group}}) + + def _get_subset_group(self, instance): + """Look into subset group profiles set by settings. + + Attribute 'subset_grouping_profiles' is defined by OpenPype settings. + """ + # Skip if 'subset_grouping_profiles' is empty + if not self.subset_grouping_profiles: + return None + + # QUESTION + # - is there a chance that task name is not filled in anatomy + # data? + # - should we use context task in that case? + anatomy_data = instance.data["anatomyData"] + task_name = None + task_type = None + if "task" in anatomy_data: + task_name = anatomy_data["task"]["name"] + task_type = anatomy_data["task"]["type"] + filtering_criteria = { + "families": instance.data["family"], + "hosts": instance.context.data["hostName"], + "tasks": task_name, + "task_types": task_type } + matching_profile = filter_profiles( + self.subset_grouping_profiles, + filtering_criteria + ) + # Skip if there is not matchin profile + if not matching_profile: + return None - existing_version = legacy_io.find_one({ - 'type': 'version', - 'parent': subset["_id"], - 'name': version_number - }, projection={"_id": True}) + filled_template = None + template = matching_profile["template"] + fill_pairs = ( + ("family", filtering_criteria["families"]), + ("task", filtering_criteria["tasks"]), + ("host", filtering_criteria["hosts"]), + ("subset", instance.data["subset"]), + ("renderlayer", instance.data.get("renderlayer")) + ) + fill_pairs = prepare_template_data(fill_pairs) - if existing_version: - self.log.debug("Updating existing version ...") - version["_id"] = existing_version["_id"] - else: - self.log.debug("Creating new version ...") - version["_id"] = ObjectId() - - bulk_writes = [ReplaceOne( - filter={"_id": version["_id"]}, - replacement=version, - upsert=True - )] - - self.log.info("Prepared version: v{0:03d}".format(version["name"])) - - return version, bulk_writes - - def prepare_representation(self, repre, - template_name, - existing_repres_by_name, - version, - instance_stagingdir, - instance): - - # pre-flight validations - if repre["ext"].startswith("."): - raise ValueError("Extension must not start with a dot '.': " - "{}".format(repre["ext"])) - - if repre.get("transfers"): - raise ValueError("Representation is not allowed to have transfers" - "data before integration. 
They are computed in " - "the integrator" - "Got: {}".format(repre["transfers"])) - - # create template data for Anatomy - template_data = copy.deepcopy(instance.data["anatomyData"]) - - # required representation keys - files = repre['files'] - template_data["representation"] = repre["name"] - template_data["ext"] = repre["ext"] - - # optionals - # retrieve additional anatomy data from representation if exists - for key, anatomy_key in { - # Representation Key: Anatomy data key - "resolutionWidth": "resolution_width", - "resolutionHeight": "resolution_height", - "fps": "fps", - "outputName": "output", - "originalBasename": "originalBasename" - }.items(): - # Allow to take value from representation - # if not found also consider instance.data - if key in repre: - value = repre[key] - elif key in instance.data: - value = instance.data[key] - else: - continue - template_data[anatomy_key] = value - - if repre.get('stagingDir'): - stagingdir = repre['stagingDir'] - else: - # Fall back to instance staging dir if not explicitly - # set for representation in the instance - self.log.debug("Representation uses instance staging dir: " - "{}".format(instance_stagingdir)) - stagingdir = instance_stagingdir - if not stagingdir: - raise ValueError("No staging directory set for representation: " - "{}".format(repre)) - - self.log.debug("Anatomy template name: {}".format(template_name)) - anatomy = instance.context.data['anatomy'] - template = os.path.normpath(anatomy.templates[template_name]["path"]) - - is_udim = bool(repre.get("udim")) - is_sequence_representation = isinstance(files, (list, tuple)) - if is_sequence_representation: - # Collection of files (sequence) - assert not any(os.path.isabs(fname) for fname in files), ( - "Given file names contain full paths" + try: + filled_template = StringTemplate.format_strict_template( + template, fill_pairs ) + except (KeyError, TemplateUnsolved): + keys = [] + if fill_pairs: + keys = fill_pairs.keys() - src_collection = assemble(files) + msg = "Subset grouping failed. " \ + "Only {} are expected in Settings".format(','.join(keys)) + self.log.warning(msg) - # If the representation has `frameStart` set it renumbers the - # frame indices of the published collection. It will start from - # that `frameStart` index instead. Thus if that frame start - # differs from the collection we want to shift the destination - # frame indices from the source collection. - destination_indexes = list(src_collection.indexes) - destination_padding = len(get_first_frame_padded(src_collection)) - if repre.get("frameStart") is not None and not is_udim: - index_frame_start = int(repre.get("frameStart")) + return filled_template - render_template = anatomy.templates[template_name] - # todo: should we ALWAYS manage the frame padding even when not - # having `frameStart` set? - frame_start_padding = int( - render_template.get( - "frame_padding", - render_template.get("padding") - ) - ) - - # Shift destination sequence to the start frame - src_start_frame = next(iter(src_collection.indexes)) - shift = index_frame_start - src_start_frame - if shift: - destination_indexes = [ - frame + shift for frame in destination_indexes - ] - destination_padding = frame_start_padding - - # To construct the destination template with anatomy we require - # a Frame or UDIM tile set for the template data. We use the first - # index of the destination for that because that could've shifted - # from the source indexes, etc. 
- first_index_padded = get_frame_padded(frame=destination_indexes[0], - padding=destination_padding) - if is_udim: - # UDIM representations handle ranges in a different manner - template_data["udim"] = first_index_padded - else: - template_data["frame"] = first_index_padded - - # Construct destination collection from template - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled[template_name]["path"] - repre_context = template_filled.used_values - self.log.debug("Template filled: {}".format(str(template_filled))) - dst_collection = assemble([os.path.normpath(template_filled)]) - - # Update the destination indexes and padding - dst_collection.indexes.clear() - dst_collection.indexes.update(set(destination_indexes)) - dst_collection.padding = destination_padding - assert ( - len(src_collection.indexes) == len(dst_collection.indexes) - ), "This is a bug" - - # Multiple file transfers - transfers = [] - for src_file_name, dst in zip(src_collection, dst_collection): - src = os.path.join(stagingdir, src_file_name) - transfers.append((src, dst)) - - else: - # Single file - fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - - # Manage anatomy template data - template_data.pop("frame", None) - if is_udim: - template_data["udim"] = repre["udim"][0] - - # Construct destination filepath from template - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled[template_name]["path"] - repre_context = template_filled.used_values - dst = os.path.normpath(template_filled) - - # Single file transfer - src = os.path.join(stagingdir, fname) - transfers = [(src, dst)] - - # todo: Are we sure the assumption each representation - # ends up in the same folder is valid? - if not instance.data.get("publishDir"): - instance.data["publishDir"] = ( - anatomy_filled - [template_name] - ["folder"] - ) - - for key in self.db_representation_context_keys: - # Also add these values to the context even if not used by the - # destination template - value = template_data.get(key) - if not value: - continue - repre_context[key] = template_data[key] - - # Explicitly store the full list even though template data might - # have a different value because it uses just a single udim tile - if repre.get("udim"): - repre_context["udim"] = repre.get("udim") # store list - - # Use previous representation's id if there is a name match - existing = existing_repres_by_name.get(repre["name"].lower()) - if existing: - repre_id = existing["_id"] - else: - repre_id = ObjectId() - - # Backwards compatibility: - # Store first transferred destination as published path data - # todo: can we remove this? - # todo: We shouldn't change data that makes its way back into - # instance.data[] until we know the publish actually succeeded - # otherwise `published_path` might not actually be valid? - published_path = transfers[0][1] - repre["published_path"] = published_path # Backwards compatibility - - # todo: `repre` is not the actual `representation` entity - # we should simplify/clarify difference between data above - # and the actual representation entity for the database - data = repre.get("data", {}) - data.update({'path': published_path, 'template': template}) - representation = { - "_id": repre_id, - "schema": "openpype:representation-2.0", - "type": "representation", - "parent": version["_id"], - "name": repre['name'], - "data": data, - - # Imprint shortcut to context for performance reasons. 
- "context": repre_context - } - - # todo: simplify/streamline which additional data makes its way into - # the representation context - if repre.get("outputName"): - representation["context"]["output"] = repre['outputName'] - - if is_sequence_representation and repre.get("frameStart") is not None: - representation['context']['frame'] = template_data["frame"] - - return { - "representation": representation, - "anatomy_data": template_data, - "transfers": transfers, - # todo: avoid the need for 'published_files' used by Integrate Hero - # backwards compatibility - "published_files": [transfer[1] for transfer in transfers] - } - - def create_version_data(self, instance): - """Create the data dictionary for the version + def create_version(self, subset, version_number, data=None): + """ Copy given source to destination Args: + subset (dict): the registered subset of the asset + version_number (int): the version number + + Returns: + dict: collection of data to create a version + """ + + return {"schema": "openpype:version-3.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "data": data} + + def create_version_data(self, context, instance): + """Create the data collection for the version + + Args: + context: the current context instance: the current instance being published Returns: - dict: the required information for version["data"] + dict: the required information with instance.data as key """ - context = instance.context + families = [] + current_families = instance.data.get("families", list()) + instance_family = instance.data.get("family", None) + + if instance_family is not None: + families.append(instance_family) + families += current_families # create relative source path for DB - if "source" in instance.data: - source = instance.data["source"] - else: + source = instance.data.get("source") + if not source: source = context.data["currentFile"] anatomy = instance.context.data["anatomy"] source = self.get_rootless_path(anatomy, source) - self.log.debug("Source: {}".format(source)) + self.log.debug("Source: {}".format(source)) version_data = { - "families": get_instance_families(instance), + "families": families, "time": context.data["time"], "author": context.data["user"], "source": source, "comment": context.data.get("comment"), "machine": context.data.get("machine"), - "fps": instance.data.get("fps", context.data.get("fps")) + "fps": context.data.get( + "fps", instance.data.get("fps") + ) } - # todo: preferably we wouldn't need this "if dict" etc. logic and - # instead be able to rely what the input value is if it's set. 
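
Both the removed and the restored code normalize "intent" here because it can arrive either as a plain string or as a dict holding "value" and "label" (patch 205 below quotes the same reasoning); a minimal illustration with made-up values:

    # Illustration only; the intent values are made up.
    intent = {"value": "wip", "label": "Work in Progress"}
    if intent and isinstance(intent, dict):
        intent = intent.get("value")
    assert intent == "wip"  # a plain string such as "final" would pass through unchanged
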
- intent_value = context.data.get("intent") + intent_value = instance.context.data.get("intent") if intent_value and isinstance(intent_value, dict): intent_value = intent_value.get("value") @@ -732,58 +994,33 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if key in instance.data: version_data[key] = instance.data[key] - # Include instance.data[versionData] directly - version_data_instance = instance.data.get('versionData') - if version_data_instance: - version_data.update(version_data_instance) - return version_data - def get_template_name(self, instance): - """Return anatomy template name to use for integration""" - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.template_name_profiles, - filter_criteria, - logger=self.log) - if profile: - return profile["template_name"] - else: - return self.default_template_name - - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" - # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] - - # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) - - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "hosts": anatomy_data["app"], - "task_types": task.get("type") - } + def main_family_from_instance(self, instance): + """Returns main family of entered instance.""" + family = instance.data.get("family") + if not family: + family = instance.data["families"][0] + return family def get_rootless_path(self, anatomy, path): - """Returns, if possible, path without absolute portion from root - (eg. 'c:\' or '/opt/..') - - This information is platform dependent and shouldn't be captured. - Example: - 'c:/projects/MyProject1/Assets/publish...' > - '{root}/MyProject1/Assets...' + """ Returns, if possible, path without absolute portion from host + (eg. 'c:\' or '/opt/..') + This information is host dependent and shouldn't be captured. + Example: + 'c:/projects/MyProject1/Assets/publish...' > + '{root}/MyProject1/Assets...' Args: - anatomy: anatomy part from instance - path: path (absolute) + anatomy: anatomy part from instance + path: path (absolute) Returns: - path: modified path if possible, or unmodified path - + warning logged + path: modified path if possible, or unmodified path + + warning logged """ - success, rootless_path = anatomy.find_root_template_from_path(path) + success, rootless_path = ( + anatomy.find_root_template_from_path(path) + ) if success: path = rootless_path else: @@ -793,40 +1030,269 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ).format(path)) return path - def get_files_info(self, destinations, sites, anatomy): - """Prepare 'files' info portion for representations. + def get_files_info(self, instance, integrated_file_sizes): + """ Prepare 'files' portion for attached resources and main asset. + Combining records from 'transfers' and 'hardlinks' parts from + instance. + All attached resources should be added, currently without + Context info. 
Arguments: - destinations (list): List of transferred file destinations - sites (list): array of published locations - anatomy: anatomy part from instance + instance: the current instance being published + integrated_file_sizes: dictionary of destination path (absolute) + and its file size Returns: output_resources: array of dictionaries to be added to 'files' key in representation """ - file_infos = [] - for file_path in destinations: - file_info = self.prepare_file_info(file_path, anatomy, sites=sites) - file_infos.append(file_info) - return file_infos + resources = list(instance.data.get("transfers", [])) + resources.extend(list(instance.data.get("hardlinks", []))) - def prepare_file_info(self, path, anatomy, sites): + self.log.debug("get_resource_files_info.resources:{}". + format(resources)) + + output_resources = [] + anatomy = instance.context.data["anatomy"] + for _src, dest in resources: + path = self.get_rootless_path(anatomy, dest) + dest = self.get_dest_temp_url(dest) + file_hash = openpype.api.source_hash(dest) + if self.TMP_FILE_EXT and \ + ',{}'.format(self.TMP_FILE_EXT) in file_hash: + file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), + '') + + file_info = self.prepare_file_info(path, + integrated_file_sizes[dest], + file_hash, + instance=instance) + output_resources.append(file_info) + + return output_resources + + def get_dest_temp_url(self, dest): + """ Enhance destination path with TMP_FILE_EXT to denote temporary + file. + Temporary files will be renamed after successful registration + into DB and full copy to destination + + Arguments: + dest: destination url of published file (absolute) + Returns: + dest: destination path + '.TMP_FILE_EXT' + """ + if self.TMP_FILE_EXT and '.{}'.format(self.TMP_FILE_EXT) not in dest: + dest += '.{}'.format(self.TMP_FILE_EXT) + return dest + + def prepare_file_info(self, path, size=None, file_hash=None, + sites=None, instance=None): """ Prepare information for one file (asset or resource) Arguments: - path: destination url of published file - anatomy: anatomy part from instance - sites: array of published locations, - [ {'name':'studio', 'created_dt':date} by default - keys expected ['studio', 'site1', 'gdrive1'] - + path: destination url of published file (rootless) + size(optional): size of file in bytes + file_hash(optional): hash of file for synchronization validation + sites(optional): array of published locations, + [ {'name':'studio', 'created_dt':date} by default + keys expected ['studio', 'site1', 'gdrive1'] + instance(dict, optional): to get collected settings Returns: - dict: file info dictionary + rec: dictionary with filled info """ - return { + local_site = 'studio' # default + remote_site = None + always_accesible = [] + sync_project_presets = None + + rec = { "_id": ObjectId(), - "path": self.get_rootless_path(anatomy, path), - "size": os.path.getsize(path), - "hash": openpype.api.source_hash(path), - "sites": sites + "path": path } + if size: + rec["size"] = size + + if file_hash: + rec["hash"] = file_hash + + if sites: + rec["sites"] = sites + else: + system_sync_server_presets = ( + instance.context.data["system_settings"] + ["modules"] + ["sync_server"]) + log.debug("system_sett:: {}".format(system_sync_server_presets)) + + if system_sync_server_presets["enabled"]: + sync_project_presets = ( + instance.context.data["project_settings"] + ["global"] + ["sync_server"]) + + if sync_project_presets and sync_project_presets["enabled"]: + local_site, remote_site = self._get_sites(sync_project_presets) + + 
always_accesible = sync_project_presets["config"]. \ + get("always_accessible_on", []) + + already_attached_sites = {} + meta = {"name": local_site, "created_dt": datetime.now()} + rec["sites"] = [meta] + already_attached_sites[meta["name"]] = meta["created_dt"] + + if sync_project_presets and sync_project_presets["enabled"]: + if remote_site and \ + remote_site not in already_attached_sites.keys(): + # add remote + meta = {"name": remote_site.strip()} + rec["sites"].append(meta) + already_attached_sites[meta["name"]] = None + + # add alternative sites + rec, already_attached_sites = self._add_alternative_sites( + system_sync_server_presets, already_attached_sites, rec) + + # add skeleton for site where it should be always synced to + for always_on_site in set(always_accesible): + if always_on_site not in already_attached_sites.keys(): + meta = {"name": always_on_site.strip()} + rec["sites"].append(meta) + already_attached_sites[meta["name"]] = None + + log.debug("final sites:: {}".format(rec["sites"])) + + return rec + + def _get_sites(self, sync_project_presets): + """Returns tuple (local_site, remote_site)""" + local_site_id = openpype.api.get_local_site_id() + local_site = sync_project_presets["config"]. \ + get("active_site", "studio").strip() + + if local_site == 'local': + local_site = local_site_id + + remote_site = sync_project_presets["config"].get("remote_site") + + if remote_site == 'local': + remote_site = local_site_id + + return local_site, remote_site + + def _add_alternative_sites(self, + system_sync_server_presets, + already_attached_sites, + rec): + """Loop through all configured sites and add alternatives. + + See SyncServerModule.handle_alternate_site + """ + conf_sites = system_sync_server_presets.get("sites", {}) + + alt_site_pairs = self._get_alt_site_pairs(conf_sites) + + already_attached_keys = list(already_attached_sites.keys()) + for added_site in already_attached_keys: + real_created = already_attached_sites[added_site] + for alt_site in alt_site_pairs.get(added_site, []): + if alt_site in already_attached_sites.keys(): + continue + meta = {"name": alt_site} + # alt site inherits state of 'created_dt' + if real_created: + meta["created_dt"] = real_created + rec["sites"].append(meta) + already_attached_sites[meta["name"]] = real_created + + return rec, already_attached_sites + + def _get_alt_site_pairs(self, conf_sites): + """Returns dict of site and its alternative sites. 
+ + If `site` has alternative site, it means that alt_site has 'site' as + alternative site + Args: + conf_sites (dict) + Returns: + (dict): {'site': [alternative sites]...} + """ + alt_site_pairs = defaultdict(list) + for site_name, site_info in conf_sites.items(): + alt_sites = set(site_info.get("alternative_sites", [])) + alt_site_pairs[site_name].extend(alt_sites) + + for alt_site in alt_sites: + alt_site_pairs[alt_site].append(site_name) + + for site_name, alt_sites in alt_site_pairs.items(): + sites_queue = deque(alt_sites) + while sites_queue: + alt_site = sites_queue.popleft() + + # safety against wrong config + # {"SFTP": {"alternative_site": "SFTP"} + if alt_site == site_name or alt_site not in alt_site_pairs: + continue + + for alt_alt_site in alt_site_pairs[alt_site]: + if ( + alt_alt_site != site_name + and alt_alt_site not in alt_sites + ): + alt_sites.append(alt_alt_site) + sites_queue.append(alt_alt_site) + + return alt_site_pairs + + def handle_destination_files(self, integrated_file_sizes, mode): + """ Clean destination files + Called when error happened during integrating to DB or to disk + OR called to rename uploaded files from temporary name to final to + highlight publishing in progress/broken + Used to clean unwanted files + + Arguments: + integrated_file_sizes: dictionary, file urls as keys, size as value + mode: 'remove' - clean files, + 'finalize' - rename files, + remove TMP_FILE_EXT suffix denoting temp file + """ + if integrated_file_sizes: + for file_url, _file_size in integrated_file_sizes.items(): + if not os.path.exists(file_url): + self.log.debug( + "File {} was not found.".format(file_url) + ) + continue + + try: + if mode == 'remove': + self.log.debug("Removing file {}".format(file_url)) + os.remove(file_url) + if mode == 'finalize': + new_name = re.sub( + r'\.{}$'.format(self.TMP_FILE_EXT), + '', + file_url + ) + + if os.path.exists(new_name): + self.log.debug( + "Overwriting file {} to {}".format( + file_url, new_name + ) + ) + shutil.copy(file_url, new_name) + os.remove(file_url) + else: + self.log.debug( + "Renaming file {} to {}".format( + file_url, new_name + ) + ) + os.rename(file_url, new_name) + except OSError: + self.log.error("Cannot {} file {}".format(mode, file_url), + exc_info=True) + six.reraise(*sys.exc_info()) From 271a829f6d441bcf26e6ddaf33510f984dc0c703 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:24:38 +0200 Subject: [PATCH 202/785] Remove duplicate source family --- openpype/plugins/publish/integrate_new.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 4c14c17dae..fd3cf8882d 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -92,7 +92,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source", "matchmove", "image", - "source", "assembly", "fbx", "textures", From 148ac26bf961aa8e44ffcd453efbdbb0f4a8df75 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:28:00 +0200 Subject: [PATCH 203/785] Update USD families with latest develop --- openpype/plugins/publish/integrate.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6ad0849ff7..6253a3ec11 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -156,8 +156,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "usd", "staticMesh", 
"skeletalMesh", - "usdComposition", - "usdOverride", + "mvLook", + "mvUsd", + "mvUsdComposition", + "mvUsdOverride", "simpleUnrealTexture" ] exclude_families = ["clip", "render.farm"] From 035c4d2f93fd0a29ba8f6f1789a327878861284a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:30:07 +0200 Subject: [PATCH 204/785] Set up old vs. new integrator per host --- openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_new.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6253a3ec11..d098147603 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -110,6 +110,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): label = "Integrate Asset New" order = pyblish.api.IntegratorOrder + hosts = ["maya"] families = ["workfile", "pointcache", "camera", diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index fd3cf8882d..c9848abc14 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -62,6 +62,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): label = "Integrate Asset New" order = pyblish.api.IntegratorOrder + hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", + "hiero", "houdini", "nuke", "photoshop", "resolve", + "standalonepublisher", "traypublisher", "tvpaint", "unreal", + "webpublisher"] families = ["workfile", "pointcache", "camera", From a3757636e7705b34699adff7e1e23f7ff57284d6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 09:31:53 +0200 Subject: [PATCH 205/785] Remove 'intent' context data override @iLLiCiTiT says: Intent should be a dictionary with "value" and "label", to be able tell if you want use value or label of the intent in templates. --- openpype/plugins/publish/collect_anatomy_context_data.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 8db9d0d3d7..0794adfb67 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -92,13 +92,5 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): } }) - # todo: some code actually expects the dict itself and others doesn't - # question: what should it be? - intent = context.data.get("intent") - if intent and isinstance(intent, dict): - intent = intent.get("value") - if intent: - context_data["intent"] = intent - self.log.info("Global anatomy Data collected") self.log.debug(json.dumps(context_data, indent=4)) From b4697b6e1a0cc778765d617b68d1e516ca7dcea9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 5 Jul 2022 10:36:37 +0200 Subject: [PATCH 206/785] Refactor integrator labels --- openpype/plugins/publish/integrate.py | 2 +- openpype/plugins/publish/integrate_new.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index d098147603..5e86eb014a 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -108,7 +108,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "data": additional metadata for each representation. 
""" - label = "Integrate Asset New" + label = "Integrate Asset" order = pyblish.api.IntegratorOrder hosts = ["maya"] families = ["workfile", diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index c9848abc14..baa14b285c 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -60,7 +60,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "data": additional metadata for each representation. """ - label = "Integrate Asset New" + label = "Integrate Asset (legacy)" order = pyblish.api.IntegratorOrder hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", "hiero", "houdini", "nuke", "photoshop", "resolve", From 49fd9e6308f711ee261293081ac1c5375c669043 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Jul 2022 15:51:54 +0200 Subject: [PATCH 207/785] editorial tray publisher kick-off --- openpype/hosts/traypublisher/api/plugin.py | 94 ++++++++++++++++++- .../plugins/create/create_editorial.py | 25 +++++ .../plugins/create/create_from_settings.py | 7 +- .../project_settings/traypublisher.json | 38 +++++++- .../schema_project_traypublisher.json | 83 ++++++++++++++++ 5 files changed, 241 insertions(+), 6 deletions(-) create mode 100644 openpype/hosts/traypublisher/plugins/create/create_editorial.py diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 202664cfc6..901f05c755 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -2,7 +2,14 @@ from openpype.pipeline import ( Creator, CreatedInstance ) -from openpype.lib import FileDef +from openpype.lib import ( + FileDef, + TextDef, + NumberDef, + EnumDef, + BoolDef, + FileDefItem +) from .pipeline import ( list_instances, @@ -95,3 +102,88 @@ class SettingsCreator(TrayPublishCreator): "default_variants": item_data["default_variants"] } ) + + +class EditorialCreator(TrayPublishCreator): + create_allow_context_change = True + + extensions = [] + + def collect_instances(self): + for instance_data in list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def create(self, subset_name, data, pre_create_data): + # Pass precreate data to creator attributes + data["creator_attributes"] = pre_create_data + data["settings_creator"] = True + # Create new instance + new_instance = CreatedInstance(self.family, subset_name, data, self) + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + + def get_instance_attr_defs(self): + if self.identifier == "editorial.simple": + return [ + FileDef( + "sequence_filepath", + folders=False, + extensions=self.sequence_extensions, + allow_sequences=self.allow_sequences, + label="Filepath", + ) + ] + else: + return [ + FileDef( + "sequence_filepath", + folders=False, + extensions=self.sequence_extensions, + allow_sequences=self.allow_sequences, + label="Sequence filepath", + ), + FileDef( + "clip_source_folder", + folders=True, + extensions=self.clip_extensions, + allow_sequences=False, + label="Clips' Source folder", + ), + TextDef("text input"), + NumberDef("number input"), + EnumDef("enum input", { + "value1": "label1", + "value2": "label2" + }), + BoolDef("bool input") + ] + + @classmethod + 
def from_settings(cls, item_data): + identifier = item_data["identifier"] + family = item_data["family"] + if not identifier: + identifier = "settings_{}".format(family) + return type( + "{}{}".format(cls.__name__, identifier), + (cls, ), + { + "family": family, + "identifier": identifier, + "label": item_data["label"].strip(), + "icon": item_data["icon"], + "description": item_data["description"], + "detailed_description": item_data["detailed_description"], + "sequence_extensions": item_data["sequence_extensions"], + "clip_extensions": item_data["clip_extensions"], + "allow_sequences": item_data["allow_sequences"], + "default_variants": item_data["default_variants"] + } + ) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py new file mode 100644 index 0000000000..d7fe0f952c --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -0,0 +1,25 @@ +import os +from pprint import pformat +from openpype.api import get_project_settings, Logger + +log = Logger.get_logger(__name__) + + +def CreateEditorial(): + from openpype.hosts.traypublisher.api.plugin import EditorialCreator + + project_name = os.environ["AVALON_PROJECT"] + project_settings = get_project_settings(project_name) + + simple_creators = project_settings["traypublisher"]["editorial_creators"] + + global_variables = globals() + for item in simple_creators: + + log.debug(pformat(item)) + + dynamic_plugin = EditorialCreator.from_settings(item) + global_variables[dynamic_plugin.__name__] = dynamic_plugin + + +CreateEditorial() diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index baca274ea6..1271e03fdb 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,6 +1,8 @@ import os +from pprint import pformat +from openpype.api import get_project_settings, Logger -from openpype.api import get_project_settings +log = Logger.get_logger(__name__) def initialize(): @@ -13,6 +15,9 @@ def initialize(): global_variables = globals() for item in simple_creators: + + log.debug(pformat(item)) + dynamic_plugin = SettingsCreator.from_settings(item) global_variables[dynamic_plugin.__name__] = dynamic_plugin diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index e938384282..64cbd4a6f3 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -30,11 +30,13 @@ ".psb", ".aep" ] - }, + } + ], + "editorial_creators": [ { "family": "editorial", - "identifier": "", - "label": "Editorial", + "identifier": "editorial.simple", + "label": "Editorial Simple", "icon": "fa.file", "default_variants": [ "Main" @@ -42,11 +44,39 @@ "description": "Editorial files to generate shots.", "detailed_description": "Supporting publishing new shots to project or updating already created. 
Publishing will create OTIO file.", "allow_sequences": false, - "extensions": [ + "sequence_extensions": [ ".edl", ".xml", ".aaf", ".fcpxml" + ], + "clip_extensions": [ + ".mov", + ".jpg", + ".png" + ] + }, + { + "family": "editorial", + "identifier": "editorial.complex", + "label": "Editorial Complex", + "icon": "fa.file", + "default_variants": [ + "Main" + ], + "description": "Editorial files to generate shots.", + "detailed_description": "Supporting publishing new shots to project or updating already created. Publishing will create OTIO file.", + "allow_sequences": false, + "sequence_extensions": [ + ".edl", + ".xml", + ".aaf", + ".fcpxml" + ], + "clip_extensions": [ + ".mov", + ".jpg", + ".png" ] } ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 55c1b7b7d7..e112a8c004 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -78,6 +78,89 @@ } ] } + }, + { + "type": "list", + "collapsible": true, + "key": "editorial_creators", + "label": "Editorial creator plugins", + "use_label_wrap": true, + "collapsible_key": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "family", + "label": "Family" + }, + { + "type": "text", + "key": "identifier", + "label": "Identifier", + "placeholder": "< Use 'Family' >", + "tooltip": "All creators must have unique identifier.\nBy default is used 'family' but if you need to have more creators with same families\nyou have to set identifier too." + }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "icon", + "label": "Icon" + }, + { + "type": "list", + "key": "default_variants", + "label": "Default variants", + "object_type": { + "type": "text" + } + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "description", + "label": "Description" + }, + { + "type": "text", + "key": "detailed_description", + "label": "Detailed Description", + "multiline": true + }, + { + "type": "separator" + }, + { + "key": "allow_sequences", + "label": "Allow sequences", + "type": "boolean" + }, + { + "type": "list", + "key": "sequence_extensions", + "label": "Sequence extensions", + "use_label_wrap": true, + "collapsible_key": true, + "collapsed": false, + "object_type": "text" + }, + { + "type": "list", + "key": "clip_extensions", + "label": "Clip source file extensions", + "use_label_wrap": true, + "collapsible_key": true, + "collapsed": false, + "object_type": "text" + } + ] + } } ] } From f76d84dff177c31f46d369f0ccd97d0a253251a7 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Wed, 6 Jul 2022 22:57:02 +0300 Subject: [PATCH 208/785] Append "Depth of Field" to plablast options and function. 
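Below is a minimal, hypothetical sketch (not part of this patch) of how the new depth-of-field option could be exercised through the vendored maya-capture module from inside a Maya session; the import path, camera name and resolution are assumptions for illustration only.

    # Assumes the vendored maya-capture module is importable as `capture`
    # (it is shipped under openpype/vendor/python/common) and that this
    # runs inside Maya, since capture uses maya.cmds internally.
    import capture

    # "renderDepthOfField" is the Viewport 2.0 key this commit adds to
    # capture.Viewport2Options; passing it via viewport2_options should
    # enable depth of field for the resulting playblast.
    capture.capture(
        camera="persp",
        width=1920,
        height=1080,
        viewport2_options={"renderDepthOfField": True},
    )
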
--- openpype/hosts/maya/api/lib.py | 4 ++++ openpype/settings/defaults/project_settings/maya.json | 1 + .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 +++++ openpype/vendor/python/common/capture.py | 3 ++- 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index cd41ba3ffd..a159554b54 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2522,6 +2522,9 @@ def load_capture_preset(data=None): temp_options2['multiSampleEnable'] = False temp_options2['multiSampleCount'] = preset[id][key] + if key == 'renderDepthOfField': + temp_options2['renderDepthOfField'] = preset[id][key] + if key == 'ssaoEnable': if preset[id][key] is True: temp_options2['ssaoEnable'] = True @@ -2618,6 +2621,7 @@ def load_capture_preset(data=None): 'motionBlurSampleCount', 'motionBlurShutterOpenFraction', 'lineAAEnable', + 'renderDepthOfField' ]: temp_options.pop(key, None) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index bb7719dc30..39a8688267 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -496,6 +496,7 @@ "override_viewport_options": true, "displayLights": "default", "textureMaxResolution": 1024, + "renderDepthOfField": true, "shadows": true, "textures": true, "twoSidedLighting": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 0a63315622..7a40f349cc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -204,6 +204,11 @@ { "type": "splitter" }, + { + "type":"boolean", + "key": "renderDepthOfField", + "label": "Depth of Field" + }, { "type": "splitter" }, diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 6b4c40a6e8..4d9e1da3e4 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -380,7 +380,8 @@ Viewport2Options = { "transparencyAlgorithm": 1, "transparencyQuality": 0.33, "useMaximumHardwareLights": True, - "vertexAnimationCache": 0 + "vertexAnimationCache": 0, + "renderDepthOfField": 0 } From 7444c2653073ec8a3a8ff49030ec547fa51cbfc2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Jul 2022 11:52:11 +0200 Subject: [PATCH 209/785] trayp: editorial wip --- openpype/hosts/traypublisher/api/editorial.py | 41 +++++++++++++++ openpype/hosts/traypublisher/api/plugin.py | 25 ++-------- .../plugins/create/create_editorial.py | 4 +- .../publish/collect_editorial_instances.py | 50 +++++++++++++++++++ 4 files changed, 98 insertions(+), 22 deletions(-) create mode 100644 openpype/hosts/traypublisher/api/editorial.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py new file mode 100644 index 0000000000..316163b2fa --- /dev/null +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -0,0 +1,41 @@ + +import os +import opentimelineio as otio +from openpype import lib as plib + +from openpype.pipeline import ( + Creator, + CreatedInstance +) + +from .pipeline import ( + list_instances, + update_instances, + 
remove_instances, + HostContext, +) + + + +class CreateEditorialInstance: + """Create Editorial OTIO timeline""" + + def __init__(self, file_path, extension=None, resources_dir=None): + self.file_path = file_path + self.video_extension = extension or ".mov" + self.resources_dir = resources_dir + + def create(self): + + # get editorial sequence file into otio timeline object + extension = os.path.splitext(self.file_path)[1] + kwargs = {} + if extension == ".edl": + # EDL has no frame rate embedded so needs explicit + # frame rate else 24 is asssumed. + kwargs["rate"] = plib.get_asset()["data"]["fps"] + + instance.data["otio_timeline"] = otio.adapters.read_from_file( + file_path, **kwargs) + + self.log.info(f"Added OTIO timeline from: `{file_path}`") diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 901f05c755..ae9e93fd60 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -41,7 +41,7 @@ class TrayPublishCreator(Creator): self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): - # Use same attributes as for instance attrobites + # Use same attributes as for instance attributes return self.get_instance_attr_defs() @@ -50,15 +50,6 @@ class SettingsCreator(TrayPublishCreator): extensions = [] - def collect_instances(self): - for instance_data in list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, data, pre_create_data): # Pass precreate data to creator attributes data["creator_attributes"] = pre_create_data @@ -109,19 +100,13 @@ class EditorialCreator(TrayPublishCreator): extensions = [] - def collect_instances(self): - for instance_data in list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, data, pre_create_data): + # TODO: create otio instance + # TODO: create clip instances + # Pass precreate data to creator attributes data["creator_attributes"] = pre_create_data - data["settings_creator"] = True + data["editorial_creator"] = True # Create new instance new_instance = CreatedInstance(self.family, subset_name, data, self) # Host implementation of storing metadata about instance diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index d7fe0f952c..8b2af8973b 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -11,10 +11,10 @@ def CreateEditorial(): project_name = os.environ["AVALON_PROJECT"] project_settings = get_project_settings(project_name) - simple_creators = project_settings["traypublisher"]["editorial_creators"] + editorial_creators = project_settings["traypublisher"]["editorial_creators"] global_variables = globals() - for item in simple_creators: + for item in editorial_creators: log.debug(pformat(item)) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py new file mode 100644 index 0000000000..874b6101c3 --- /dev/null +++ 
b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py @@ -0,0 +1,50 @@ +import os +import pyblish.api + + +class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): + """Collect data for instances created by settings creators.""" + + label = "Collect Settings Simple Instances" + order = pyblish.api.CollectorOrder - 0.49 + + hosts = ["traypublisher"] + + def process(self, instance): + if not instance.data.get("ediorial_creator"): + return + + if "families" not in instance.data: + instance.data["families"] = [] + + if "representations" not in instance.data: + instance.data["representations"] = [] + repres = instance.data["representations"] + + creator_attributes = instance.data["creator_attributes"] + filepath_item = creator_attributes["filepath"] + self.log.info(filepath_item) + filepaths = [ + os.path.join(filepath_item["directory"], filename) + for filename in filepath_item["filenames"] + ] + + instance.data["sourceFilepaths"] = filepaths + instance.data["stagingDir"] = filepath_item["directory"] + + filenames = filepath_item["filenames"] + _, ext = os.path.splitext(filenames[0]) + ext = ext[1:] + if len(filenames) == 1: + filenames = filenames[0] + + repres.append({ + "ext": ext, + "name": ext, + "stagingDir": filepath_item["directory"], + "files": filenames + }) + + self.log.debug("Created Simple Settings instance {}".format( + instance.data + )) From a88b1f1a33c1dada33a67cbe488776ce0c5f0b22 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Jul 2022 22:02:15 +0200 Subject: [PATCH 210/785] trayp: editorial family wip --- openpype/hosts/traypublisher/api/editorial.py | 41 ------- openpype/hosts/traypublisher/api/plugin.py | 86 +------------ .../plugins/create/create_editorial.py | 116 ++++++++++++++++-- openpype/pipeline/create/creator_plugins.py | 6 + 4 files changed, 112 insertions(+), 137 deletions(-) delete mode 100644 openpype/hosts/traypublisher/api/editorial.py diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py deleted file mode 100644 index 316163b2fa..0000000000 --- a/openpype/hosts/traypublisher/api/editorial.py +++ /dev/null @@ -1,41 +0,0 @@ - -import os -import opentimelineio as otio -from openpype import lib as plib - -from openpype.pipeline import ( - Creator, - CreatedInstance -) - -from .pipeline import ( - list_instances, - update_instances, - remove_instances, - HostContext, -) - - - -class CreateEditorialInstance: - """Create Editorial OTIO timeline""" - - def __init__(self, file_path, extension=None, resources_dir=None): - self.file_path = file_path - self.video_extension = extension or ".mov" - self.resources_dir = resources_dir - - def create(self): - - # get editorial sequence file into otio timeline object - extension = os.path.splitext(self.file_path)[1] - kwargs = {} - if extension == ".edl": - # EDL has no frame rate embedded so needs explicit - # frame rate else 24 is asssumed. 
- kwargs["rate"] = plib.get_asset()["data"]["fps"] - - instance.data["otio_timeline"] = otio.adapters.read_from_file( - file_path, **kwargs) - - self.log.info(f"Added OTIO timeline from: `{file_path}`") diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index ae9e93fd60..94f6e7487f 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -3,12 +3,7 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import ( - FileDef, - TextDef, - NumberDef, - EnumDef, - BoolDef, - FileDefItem + FileDef ) from .pipeline import ( @@ -93,82 +88,3 @@ class SettingsCreator(TrayPublishCreator): "default_variants": item_data["default_variants"] } ) - - -class EditorialCreator(TrayPublishCreator): - create_allow_context_change = True - - extensions = [] - - def create(self, subset_name, data, pre_create_data): - # TODO: create otio instance - # TODO: create clip instances - - # Pass precreate data to creator attributes - data["creator_attributes"] = pre_create_data - data["editorial_creator"] = True - # Create new instance - new_instance = CreatedInstance(self.family, subset_name, data, self) - # Host implementation of storing metadata about instance - HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) - - def get_instance_attr_defs(self): - if self.identifier == "editorial.simple": - return [ - FileDef( - "sequence_filepath", - folders=False, - extensions=self.sequence_extensions, - allow_sequences=self.allow_sequences, - label="Filepath", - ) - ] - else: - return [ - FileDef( - "sequence_filepath", - folders=False, - extensions=self.sequence_extensions, - allow_sequences=self.allow_sequences, - label="Sequence filepath", - ), - FileDef( - "clip_source_folder", - folders=True, - extensions=self.clip_extensions, - allow_sequences=False, - label="Clips' Source folder", - ), - TextDef("text input"), - NumberDef("number input"), - EnumDef("enum input", { - "value1": "label1", - "value2": "label2" - }), - BoolDef("bool input") - ] - - @classmethod - def from_settings(cls, item_data): - identifier = item_data["identifier"] - family = item_data["family"] - if not identifier: - identifier = "settings_{}".format(family) - return type( - "{}{}".format(cls.__name__, identifier), - (cls, ), - { - "family": family, - "identifier": identifier, - "label": item_data["label"].strip(), - "icon": item_data["icon"], - "description": item_data["description"], - "detailed_description": item_data["detailed_description"], - "sequence_extensions": item_data["sequence_extensions"], - "clip_extensions": item_data["clip_extensions"], - "allow_sequences": item_data["allow_sequences"], - "default_variants": item_data["default_variants"] - } - ) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 8b2af8973b..49fba65711 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -1,25 +1,119 @@ import os -from pprint import pformat -from openpype.api import get_project_settings, Logger +import opentimelineio as otio +from openpype.api import get_project_settings +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator +from openpype.pipeline.create.creator_plugins import InvisibleCreator -log = Logger.get_logger(__name__) +from 
openpype.pipeline import CreatedInstance + +from openpype.lib import ( + FileDef, + TextDef, + NumberDef, + EnumDef, + BoolDef +) + +from openpype.hosts.traypublisher.api.pipeline import HostContext def CreateEditorial(): - from openpype.hosts.traypublisher.api.plugin import EditorialCreator - project_name = os.environ["AVALON_PROJECT"] project_settings = get_project_settings(project_name) editorial_creators = project_settings["traypublisher"]["editorial_creators"] - global_variables = globals() - for item in editorial_creators: - log.debug(pformat(item)) +class EditorialClipInstanceCreator(InvisibleCreator): + identifier = "editorial.clip" + family = "clip" - dynamic_plugin = EditorialCreator.from_settings(item) - global_variables[dynamic_plugin.__name__] = dynamic_plugin + def create(self, instance_data, source_data): + # instance_data > asset, task_name, variant, family + # source_data > additional data + self.log.info(f"instance_data: {instance_data}") + self.log.info(f"source_data: {source_data}") -CreateEditorial() +class EditorialSimpleCreator(TrayPublishCreator): + + label = "Editorial Simple" + family = "editorial" + identifier = "editorial.simple" + default_variants = [ + "main", + "review" + ] + description = "Editorial files to generate shots." + detailed_description = """ +Supporting publishing new shots to project +or updating already created. Publishing will create OTIO file. +""" + icon = "fa.file" + + def create(self, subset_name, data, pre_create_data): + # TODO: create otio instance + otio_timeline = self._create_otio_instance( + subset_name, data, pre_create_data) + + # TODO: create clip instances + editorial_clip_creator = self.create_context.creators["editorial.clip"] + editorial_clip_creator.create({}, {}) + + def _create_otio_instance(self, subset_name, data, pre_create_data): + # from openpype import lib as plib + + # get path of sequence + file_path_data = pre_create_data["sequence_filepath_data"] + file_path = os.path.join( + file_path_data["directory"], file_path_data["filenames"][0]) + + self.log.info(f"file_path: {file_path}") + + # get editorial sequence file into otio timeline object + extension = os.path.splitext(file_path)[1] + kwargs = {} + if extension == ".edl": + # EDL has no frame rate embedded so needs explicit + # frame rate else 24 is asssumed. 
+ kwargs["rate"] = float(25) + # plib.get_asset()["data"]["fps"] + + self.log.info(f"kwargs: {kwargs}") + otio_timeline = otio.adapters.read_from_file( + file_path, **kwargs) + + # Pass precreate data to creator attributes + data.update({ + "creator_attributes": pre_create_data, + "editorial_creator": True + + }) + + self._create_instance(self.family, subset_name, data) + + return otio_timeline + + def _create_instance(self, family, subset_name, data): + # Create new instance + new_instance = CreatedInstance(family, subset_name, data, self) + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + + def get_instance_attr_defs(self): + return [ + FileDef( + "sequence_filepath_data", + folders=False, + extensions=[ + ".edl", + ".xml", + ".aaf", + ".fcpxml" + ], + allow_sequences=False, + label="Filepath", + ) + ] diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 8006d4f4f8..778d6846b2 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -342,6 +342,12 @@ class Creator(BaseCreator): return self.pre_create_attr_defs +class InvisibleCreator(BaseCreator): + @abstractmethod + def create(self, instance_data, source_data): + pass + + class AutoCreator(BaseCreator): """Creator which is automatically triggered without user interaction. From 14acec63c2d0a3760f9ecbf41a431461f9bc459b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Jul 2022 10:49:35 +0200 Subject: [PATCH 211/785] create plugins have access to project name --- openpype/pipeline/create/context.py | 4 ++++ openpype/pipeline/create/creator_plugins.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 12cd9bbc68..c91b13e520 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -748,6 +748,10 @@ class CreateContext: def host_name(self): return os.environ["AVALON_APP"] + @property + def project_name(self): + return self.dbcon.active_project() + @property def log(self): """Dynamic access to logger.""" diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 778d6846b2..be3f3d4cbd 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -92,6 +92,12 @@ class BaseCreator: """Family that plugin represents.""" pass + @property + def project_name(self): + """Family that plugin represents.""" + + self.create_context.project_name + @property def log(self): if self._log is None: From 9eed955303f3937b0e0ddb4fbd515408a69c0e95 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Jul 2022 11:02:22 +0200 Subject: [PATCH 212/785] use settings on init and query asset document --- .../plugins/create/create_editorial.py | 37 ++++++++++++++----- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 49fba65711..54a52dfb75 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -1,6 +1,7 @@ import os +from copy import deepcopy import opentimelineio as otio -from openpype.api import get_project_settings +from openpype.client import 
get_asset_by_name from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator from openpype.pipeline.create.creator_plugins import InvisibleCreator @@ -17,16 +18,18 @@ from openpype.lib import ( from openpype.hosts.traypublisher.api.pipeline import HostContext -def CreateEditorial(): - project_name = os.environ["AVALON_PROJECT"] - project_settings = get_project_settings(project_name) - - editorial_creators = project_settings["traypublisher"]["editorial_creators"] - - class EditorialClipInstanceCreator(InvisibleCreator): identifier = "editorial.clip" family = "clip" + host_name = "traypublisher" + + def __init__( + self, create_context, system_settings, project_settings, + *args, **kwargs + ): + super(EditorialClipInstanceCreator, self).__init__( + create_context, system_settings, project_settings, *args, **kwargs + ) def create(self, instance_data, source_data): # instance_data > asset, task_name, variant, family @@ -51,10 +54,24 @@ or updating already created. Publishing will create OTIO file. """ icon = "fa.file" - def create(self, subset_name, data, pre_create_data): + def __init__( + self, create_context, system_settings, project_settings, + *args, **kwargs + ): + super(EditorialSimpleCreator, self).__init__( + create_context, system_settings, project_settings, *args, **kwargs + ) + editorial_creators = ( + project_settings["traypublisher"]["editorial_creators"] + ) + self._editorial_creators = deepcopy(editorial_creators) + + def create(self, subset_name, instance_data, pre_create_data): # TODO: create otio instance + asset_name = instance_data["asset"] + asset_doc = get_asset_by_name(self.project_name, asset_name) otio_timeline = self._create_otio_instance( - subset_name, data, pre_create_data) + subset_name, instance_data, pre_create_data) # TODO: create clip instances editorial_clip_creator = self.create_context.creators["editorial.clip"] From dcc64f9eb425c94e85a66df53d1c2ddd7762b7b7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 11:13:15 +0200 Subject: [PATCH 213/785] trayp: updating create_editorial --- .../hosts/traypublisher/plugins/create/create_editorial.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 54a52dfb75..a58d968e3d 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -61,10 +61,13 @@ or updating already created. Publishing will create OTIO file. 
super(EditorialSimpleCreator, self).__init__( create_context, system_settings, project_settings, *args, **kwargs ) - editorial_creators = ( + editorial_creators = deepcopy( project_settings["traypublisher"]["editorial_creators"] ) - self._editorial_creators = deepcopy(editorial_creators) + self._creator_settings = editorial_creators.get(self.__name__) + + if self._creator_settings.get("default_variants"): + self.default_variants = self._creator_settings["default_variants"] def create(self, subset_name, instance_data, pre_create_data): # TODO: create otio instance From c9a70d410f8de60b4171fad7f2314dda7c4d5e20 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Jul 2022 11:15:09 +0200 Subject: [PATCH 214/785] use project_name attribute --- openpype/pipeline/create/context.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index c91b13e520..8f79110fdf 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -843,9 +843,8 @@ class CreateContext: self.plugins_with_defs = plugins_with_defs # Prepare settings - project_name = self.dbcon.Session["AVALON_PROJECT"] system_settings = get_system_settings() - project_settings = get_project_settings(project_name) + project_settings = get_project_settings(self.project_name) # Discover and prepare creators creators = {} From 76e36015dcd530c6b5c40b9a8a041821d308a1ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Jul 2022 11:20:44 +0200 Subject: [PATCH 215/785] implemented invisible tray publisher creator --- openpype/hosts/traypublisher/api/plugin.py | 25 +++++++++++++++++++++- openpype/pipeline/create/__init__.py | 2 ++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 94f6e7487f..75f73e88b1 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,5 +1,6 @@ -from openpype.pipeline import ( +from openpype.pipeline.create import ( Creator, + InivisbleCreator, CreatedInstance ) from openpype.lib import ( @@ -14,6 +15,28 @@ from .pipeline import ( ) +class InvisibleTrayPublishCreator(InivisbleCreator): + create_allow_context_change = True + host_name = "traypublisher" + + def collect_instances(self): + for instance_data in list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + update_instances(update_list) + + def remove_instances(self, instances): + remove_instances(instances) + for instance in instances: + self._remove_instance_from_context(instance) + + class TrayPublishCreator(Creator): create_allow_context_change = True host_name = "traypublisher" diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index 1beeb4267b..a0f2c16f75 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -7,6 +7,7 @@ from .creator_plugins import ( BaseCreator, Creator, AutoCreator, + InivisbleCreator, discover_creator_plugins, discover_legacy_creator_plugins, @@ -35,6 +36,7 @@ __all__ = ( "BaseCreator", "Creator", "AutoCreator", + "InivisbleCreator", "discover_creator_plugins", "discover_legacy_creator_plugins", From 82899b1acda57320c0faf31fcf0666a762b041d0 Mon Sep 17 00:00:00 2001 From: Jakub 
Trllo Date: Fri, 8 Jul 2022 11:22:15 +0200 Subject: [PATCH 216/785] implement get_pre_create_attr_defs only for settings creator --- openpype/hosts/traypublisher/api/plugin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 75f73e88b1..c7f2f4ec13 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -58,10 +58,6 @@ class TrayPublishCreator(Creator): for instance in instances: self._remove_instance_from_context(instance) - def get_pre_create_attr_defs(self): - # Use same attributes as for instance attributes - return self.get_instance_attr_defs() - class SettingsCreator(TrayPublishCreator): create_allow_context_change = True @@ -90,6 +86,10 @@ class SettingsCreator(TrayPublishCreator): ) ] + def get_pre_create_attr_defs(self): + # Use same attributes as for instance attrobites + return self.get_instance_attr_defs() + @classmethod def from_settings(cls, item_data): identifier = item_data["identifier"] From 10aae0e98686c6cc2463b4b763778adcb25608aa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 11:28:02 +0200 Subject: [PATCH 217/785] fixing invisible creator name --- .../plugins/create/create_editorial.py | 13 +++++++++---- openpype/pipeline/create/__init__.py | 4 ++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index a58d968e3d..61f24ec60e 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -2,8 +2,11 @@ import os from copy import deepcopy import opentimelineio as otio from openpype.client import get_asset_by_name -from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator -from openpype.pipeline.create.creator_plugins import InvisibleCreator +from openpype.hosts.traypublisher.api.plugin import ( + TrayPublishCreator, + InvisibleTrayPublishCreator +) + from openpype.pipeline import CreatedInstance @@ -18,7 +21,7 @@ from openpype.lib import ( from openpype.hosts.traypublisher.api.pipeline import HostContext -class EditorialClipInstanceCreator(InvisibleCreator): +class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): identifier = "editorial.clip" family = "clip" host_name = "traypublisher" @@ -64,8 +67,10 @@ or updating already created. Publishing will create OTIO file. 
editorial_creators = deepcopy( project_settings["traypublisher"]["editorial_creators"] ) - self._creator_settings = editorial_creators.get(self.__name__) + # get this creator settings by identifier + self._creator_settings = editorial_creators.get(self.identifier) + # try to set main attributes from settings if self._creator_settings.get("default_variants"): self.default_variants = self._creator_settings["default_variants"] diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index a0f2c16f75..cd01c53cf5 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -7,7 +7,7 @@ from .creator_plugins import ( BaseCreator, Creator, AutoCreator, - InivisbleCreator, + InvisibleCreator, discover_creator_plugins, discover_legacy_creator_plugins, @@ -36,7 +36,7 @@ __all__ = ( "BaseCreator", "Creator", "AutoCreator", - "InivisbleCreator", + "InvisibleCreator", "discover_creator_plugins", "discover_legacy_creator_plugins", From a31ea2a24d4de68fbbb6f47d5eb224cd02e182e7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 11:28:37 +0200 Subject: [PATCH 218/785] fixing invisible creator name 2 --- openpype/hosts/traypublisher/api/plugin.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index c7f2f4ec13..0d7651e464 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,6 +1,6 @@ from openpype.pipeline.create import ( Creator, - InivisbleCreator, + InvisibleCreator, CreatedInstance ) from openpype.lib import ( @@ -15,8 +15,7 @@ from .pipeline import ( ) -class InvisibleTrayPublishCreator(InivisbleCreator): - create_allow_context_change = True +class InvisibleTrayPublishCreator(InvisibleCreator): host_name = "traypublisher" def collect_instances(self): From 2270c972906b6aac890c95016c125f4001a63f0f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 11:28:51 +0200 Subject: [PATCH 219/785] trayp: adding settings --- .../project_settings/traypublisher.json | 47 +-------- .../schema_project_traypublisher.json | 96 ++++--------------- 2 files changed, 24 insertions(+), 119 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 64cbd4a6f3..4a672789ed 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -32,52 +32,11 @@ ] } ], - "editorial_creators": [ - { - "family": "editorial", - "identifier": "editorial.simple", - "label": "Editorial Simple", - "icon": "fa.file", + "editorial_creators": { + "editorial.simple": { "default_variants": [ "Main" - ], - "description": "Editorial files to generate shots.", - "detailed_description": "Supporting publishing new shots to project or updating already created. Publishing will create OTIO file.", - "allow_sequences": false, - "sequence_extensions": [ - ".edl", - ".xml", - ".aaf", - ".fcpxml" - ], - "clip_extensions": [ - ".mov", - ".jpg", - ".png" - ] - }, - { - "family": "editorial", - "identifier": "editorial.complex", - "label": "Editorial Complex", - "icon": "fa.file", - "default_variants": [ - "Main" - ], - "description": "Editorial files to generate shots.", - "detailed_description": "Supporting publishing new shots to project or updating already created. 
Publishing will create OTIO file.", - "allow_sequences": false, - "sequence_extensions": [ - ".edl", - ".xml", - ".aaf", - ".fcpxml" - ], - "clip_extensions": [ - ".mov", - ".jpg", - ".png" ] } - ] + } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index e112a8c004..1b24fcbe93 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -80,87 +80,33 @@ } }, { - "type": "list", + "type": "dict", "collapsible": true, "key": "editorial_creators", "label": "Editorial creator plugins", "use_label_wrap": true, "collapsible_key": true, - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "family", - "label": "Family" - }, - { - "type": "text", - "key": "identifier", - "label": "Identifier", - "placeholder": "< Use 'Family' >", - "tooltip": "All creators must have unique identifier.\nBy default is used 'family' but if you need to have more creators with same families\nyou have to set identifier too." - }, - { - "type": "text", - "key": "label", - "label": "Label" - }, - { - "type": "text", - "key": "icon", - "label": "Icon" - }, - { - "type": "list", - "key": "default_variants", - "label": "Default variants", - "object_type": { - "type": "text" + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "editorial.simple", + "label": "Editorial simple creator", + "use_label_wrap": true, + "collapsible_key": true, + "children": [ + + { + "type": "list", + "key": "default_variants", + "label": "Default variants", + "object_type": { + "type": "text" + } } - }, - { - "type": "separator" - }, - { - "type": "text", - "key": "description", - "label": "Description" - }, - { - "type": "text", - "key": "detailed_description", - "label": "Detailed Description", - "multiline": true - }, - { - "type": "separator" - }, - { - "key": "allow_sequences", - "label": "Allow sequences", - "type": "boolean" - }, - { - "type": "list", - "key": "sequence_extensions", - "label": "Sequence extensions", - "use_label_wrap": true, - "collapsible_key": true, - "collapsed": false, - "object_type": "text" - }, - { - "type": "list", - "key": "clip_extensions", - "label": "Clip source file extensions", - "use_label_wrap": true, - "collapsible_key": true, - "collapsed": false, - "object_type": "text" - } - ] - } + ] + } + ] } ] } From aa79551cedf9fef3c78535fbe8f3a3b819fa3f7f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 11:34:31 +0200 Subject: [PATCH 220/785] trayp: identifier as key in settings didnt work with dot --- openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 +- openpype/settings/defaults/project_settings/traypublisher.json | 2 +- .../schemas/projects_schema/schema_project_traypublisher.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 61f24ec60e..442ff77130 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -45,7 +45,7 @@ class EditorialSimpleCreator(TrayPublishCreator): label = "Editorial Simple" family = "editorial" - identifier = "editorial.simple" + identifier = "editorialSimple" 
default_variants = [ "main", "review" diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 4a672789ed..ef6dc5fec7 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -33,7 +33,7 @@ } ], "editorial_creators": { - "editorial.simple": { + "editorialSimple": { "default_variants": [ "Main" ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 1b24fcbe93..11ae0e65a7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -90,7 +90,7 @@ { "type": "dict", "collapsible": true, - "key": "editorial.simple", + "key": "editorialSimple", "label": "Editorial simple creator", "use_label_wrap": true, "collapsible_key": true, From ec21481c60847f35c98bbf534a1479440d8bd28f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 12:04:29 +0200 Subject: [PATCH 221/785] trayp: adding precreate properties --- .../plugins/create/create_editorial.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 442ff77130..b31072aaf1 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -15,14 +15,16 @@ from openpype.lib import ( TextDef, NumberDef, EnumDef, - BoolDef + BoolDef, + UISeparatorDef, + UILabelDef ) from openpype.hosts.traypublisher.api.pipeline import HostContext class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): - identifier = "editorial.clip" + identifier = "editorialClip" family = "clip" host_name = "traypublisher" @@ -47,8 +49,7 @@ class EditorialSimpleCreator(TrayPublishCreator): family = "editorial" identifier = "editorialSimple" default_variants = [ - "main", - "review" + "main" ] description = "Editorial files to generate shots." detailed_description = """ @@ -82,7 +83,7 @@ or updating already created. Publishing will create OTIO file. subset_name, instance_data, pre_create_data) # TODO: create clip instances - editorial_clip_creator = self.create_context.creators["editorial.clip"] + editorial_clip_creator = self.create_context.creators["editorialClip"] editorial_clip_creator.create({}, {}) def _create_otio_instance(self, subset_name, data, pre_create_data): @@ -127,7 +128,8 @@ or updating already created. Publishing will create OTIO file. # Add instance to current context self._add_instance_to_context(new_instance) - def get_instance_attr_defs(self): + def get_pre_create_attr_defs(self): + # Use same attributes as for instance attrobites return [ FileDef( "sequence_filepath_data", @@ -140,5 +142,7 @@ or updating already created. Publishing will create OTIO file. 
], allow_sequences=False, label="Filepath", - ) - ] + ), + UISeparatorDef(), + UILabelDef("Clip instance attributes") + ] \ No newline at end of file From ba4dd7cc2234b882b0e527e75d0a5bc48666f463 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 12:47:36 +0200 Subject: [PATCH 222/785] creator: fixing returning project_name --- openpype/pipeline/create/creator_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index be3f3d4cbd..e0de2baa77 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -96,7 +96,7 @@ class BaseCreator: def project_name(self): """Family that plugin represents.""" - self.create_context.project_name + return self.create_context.project_name @property def log(self): From 01e548d2ff070fe6e9058f334e097919ea18cee9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 13:52:36 +0200 Subject: [PATCH 223/785] trayp: fixing init arg --- .../traypublisher/plugins/create/create_editorial.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index b31072aaf1..560a5ae047 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -29,11 +29,10 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): host_name = "traypublisher" def __init__( - self, create_context, system_settings, project_settings, - *args, **kwargs + self, project_settings, *args, **kwargs ): super(EditorialClipInstanceCreator, self).__init__( - create_context, system_settings, project_settings, *args, **kwargs + project_settings, *args, **kwargs ) def create(self, instance_data, source_data): @@ -59,11 +58,10 @@ or updating already created. Publishing will create OTIO file. 
icon = "fa.file" def __init__( - self, create_context, system_settings, project_settings, - *args, **kwargs + self, project_settings, *args, **kwargs ): super(EditorialSimpleCreator, self).__init__( - create_context, system_settings, project_settings, *args, **kwargs + project_settings, *args, **kwargs ) editorial_creators = deepcopy( project_settings["traypublisher"]["editorial_creators"] From c08713c258a230ab20494e692fce9eaa488b8cd3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Jul 2022 17:19:28 +0200 Subject: [PATCH 224/785] trayp: udpating editorial creator --- .../plugins/create/create_editorial.py | 220 ++++++++++++++++-- 1 file changed, 207 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 560a5ae047..ed91f0201f 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -1,5 +1,6 @@ import os from copy import deepcopy +from pprint import pformat import opentimelineio as otio from openpype.client import get_asset_by_name from openpype.hosts.traypublisher.api.plugin import ( @@ -23,6 +24,31 @@ from openpype.lib import ( from openpype.hosts.traypublisher.api.pipeline import HostContext +CLIP_ATTR_DEFS = [ + NumberDef( + "timeline_offset", + default=900000, + label="Timeline offset" + ), + UISeparatorDef(), + NumberDef( + "workfile_start_frame", + default=1001, + label="Workfile start frame" + ), + NumberDef( + "handle_start", + default=0, + label="Handle start" + ), + NumberDef( + "handle_end", + default=0, + label="Handle end" + ) +] + + class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): identifier = "editorialClip" family = "clip" @@ -41,6 +67,32 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): self.log.info(f"instance_data: {instance_data}") self.log.info(f"source_data: {source_data}") + instance_name = "{}_{}".format( + instance_data["name"], + "plateMain" + ) + return self._create_instance(instance_name, instance_data) + + def _create_instance(self, subset_name, data): + # Create new instance + new_instance = CreatedInstance(self.family, subset_name, data, self) + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + + return new_instance + + def get_instance_attr_defs(self): + attr_defs = [ + TextDef( + "asset_name", + label="Asset name", + ) + ] + attr_defs.extend(CLIP_ATTR_DEFS) + return attr_defs + class EditorialSimpleCreator(TrayPublishCreator): @@ -57,6 +109,8 @@ or updating already created. Publishing will create OTIO file. """ icon = "fa.file" + + def __init__( self, project_settings, *args, **kwargs ): @@ -74,19 +128,29 @@ or updating already created. Publishing will create OTIO file. 
self.default_variants = self._creator_settings["default_variants"] def create(self, subset_name, instance_data, pre_create_data): + clip_instance_properties = { + k: v for k, v in pre_create_data.items() + if k != "sequence_filepath_data" + } # TODO: create otio instance asset_name = instance_data["asset"] asset_doc = get_asset_by_name(self.project_name, asset_name) + fps = asset_doc["data"]["fps"] + instance_data.update({ + "fps": fps + }) otio_timeline = self._create_otio_instance( subset_name, instance_data, pre_create_data) # TODO: create clip instances - editorial_clip_creator = self.create_context.creators["editorialClip"] - editorial_clip_creator.create({}, {}) + clip_instance_properties.update({ + "fps": fps, + "asset_name": asset_name + }) + self._get_clip_instances( + asset_name, otio_timeline, clip_instance_properties) def _create_otio_instance(self, subset_name, data, pre_create_data): - # from openpype import lib as plib - # get path of sequence file_path_data = pre_create_data["sequence_filepath_data"] file_path = os.path.join( @@ -100,8 +164,7 @@ or updating already created. Publishing will create OTIO file. if extension == ".edl": # EDL has no frame rate embedded so needs explicit # frame rate else 24 is asssumed. - kwargs["rate"] = float(25) - # plib.get_asset()["data"]["fps"] + kwargs["rate"] = data["fps"] self.log.info(f"kwargs: {kwargs}") otio_timeline = otio.adapters.read_from_file( @@ -109,15 +172,144 @@ or updating already created. Publishing will create OTIO file. # Pass precreate data to creator attributes data.update({ - "creator_attributes": pre_create_data, - "editorial_creator": True - + "sequence_file_path": file_path }) self._create_instance(self.family, subset_name, data) return otio_timeline + def _get_clip_instances( + self, + asset_name, + otio_timeline, + clip_instance_properties + ): + parent_asset_name = clip_instance_properties["asset_name"] + handle_start = clip_instance_properties["handle_start"] + handle_end = clip_instance_properties["handle_end"] + timeline_offset = clip_instance_properties["timeline_offset"] + workfile_start_frame = clip_instance_properties["workfile_start_frame"] + fps = clip_instance_properties["fps"] + + assets_shared = {} + self.asset_name_check = [] + + editorial_clip_creator = self.create_context.creators["editorialClip"] + + tracks = otio_timeline.each_child( + descended_from_type=otio.schema.Track + ) + + for track in tracks: + self.log.debug(f"track.name: {track.name}") + try: + track_start_frame = ( + abs(track.source_range.start_time.value) + ) + self.log.debug(f"track_start_frame: {track_start_frame}") + track_start_frame -= self.timeline_frame_start + except AttributeError: + track_start_frame = 0 + + self.log.debug(f"track_start_frame: {track_start_frame}") + + for clip in track.each_child(): + + if not self._validate_clip_for_processing(clip): + continue + + # basic unique asset name + clip_name = os.path.splitext(clip.name)[0].lower() + name = f"{asset_name.split('_')[0]}_{clip_name}" + + # make sure the name is unique + self._validate_name_uniqueness(name) + + # frame ranges data + clip_in = clip.range_in_parent().start_time.value + clip_in += track_start_frame + clip_out = clip.range_in_parent().end_time_inclusive().value + clip_out += track_start_frame + self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") + + # add offset in case there is any + if timeline_offset: + clip_in += timeline_offset + clip_out += timeline_offset + + clip_duration = clip.duration().value + self.log.info(f"clip duration: 
{clip_duration}") + + source_in = clip.trimmed_range().start_time.value + source_out = source_in + clip_duration + + # define starting frame for future shot + frame_start = ( + clip_in if workfile_start_frame is None + else workfile_start_frame + ) + frame_end = frame_start + (clip_duration - 1) + + # create shared new instance data + instance_data = { + "variant": "Main", + "families": ["plate"], + + # shared attributes + "asset": parent_asset_name, + "name": clip_name, + "task": "Compositing", + + # parent time properties + "trackStartFrame": track_start_frame, + + # creator_attributes + "creator_attributes": { + "asset_name": clip_name, + "timeline_offset": timeline_offset, + "workfile_start_frame": workfile_start_frame, + "frameStart": frame_start, + "frameEnd": frame_end, + "fps": fps, + "handle_start": handle_start, + "handle_end": handle_end, + "clipIn": clip_in, + "clipOut": clip_out, + "sourceIn": source_in, + "sourceOut": source_out, + } + } + + c_instance = editorial_clip_creator.create(instance_data, {}) + self.log.debug(f"{pformat(dict(c_instance.data))}") + + def _validate_clip_for_processing(self, clip): + if clip.name is None: + return False + + if isinstance(clip, otio.schema.Gap): + return False + + # skip all generators like black empty + if isinstance( + clip.media_reference, + otio.schema.GeneratorReference): + return False + + # Transitions are ignored, because Clips have the full frame + # range. + if isinstance(clip, otio.schema.Transition): + return False + + return True + + def _validate_name_uniqueness(self, name): + if name not in self.asset_name_check: + self.asset_name_check.append(name) + else: + self.log.warning(f"duplicate shot name: {name}") + def _create_instance(self, family, subset_name, data): # Create new instance new_instance = CreatedInstance(family, subset_name, data, self) @@ -128,7 +320,7 @@ or updating already created. Publishing will create OTIO file. def get_pre_create_attr_defs(self): # Use same attributes as for instance attrobites - return [ + attr_defs = [ FileDef( "sequence_filepath_data", folders=False, @@ -141,6 +333,8 @@ or updating already created. Publishing will create OTIO file. 
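The `_validate_clip_for_processing` helper above is what keeps gaps, transitions and generator clips (black slugs, bars) from becoming shot instances. A self-contained sketch of the same filtering idea, assuming only that the opentimelineio package is installed; the track and clip names are made up for the example:

import opentimelineio as otio


def is_publishable(item):
    # Mirror _validate_clip_for_processing: only named, real clips count.
    if not isinstance(item, otio.schema.Clip):
        # Gaps, transitions, nested stacks and so on never become shots.
        return False
    if not item.name:
        return False
    # Generator references (black, bars) carry no real media behind them.
    if isinstance(item.media_reference, otio.schema.GeneratorReference):
        return False
    return True


track = otio.schema.Track(name="V1")
track.append(otio.schema.Clip(name="sh010"))
track.append(otio.schema.Gap(
    source_range=otio.opentime.TimeRange(
        duration=otio.opentime.RationalTime(12, 25))))
track.append(otio.schema.Clip(name="sh020"))

print([item.name for item in track if is_publishable(item)])  # ['sh010', 'sh020']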
allow_sequences=False, label="Filepath", ), - UISeparatorDef(), - UILabelDef("Clip instance attributes") - ] \ No newline at end of file + UILabelDef("Clip instance attributes"), + UISeparatorDef() + ] + attr_defs.extend(CLIP_ATTR_DEFS) + return attr_defs From c60c0ff2d9abe9ecb312a3f553b9523c4bbae8f2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Jul 2022 09:58:07 +0200 Subject: [PATCH 225/785] trayp: updating create editorial --- .../plugins/create/create_editorial.py | 66 +++++++++++-------- 1 file changed, 40 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index ed91f0201f..3164e4aa99 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -25,12 +25,6 @@ from openpype.hosts.traypublisher.api.pipeline import HostContext CLIP_ATTR_DEFS = [ - NumberDef( - "timeline_offset", - default=900000, - label="Timeline offset" - ), - UISeparatorDef(), NumberDef( "workfile_start_frame", default=1001, @@ -62,20 +56,20 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): ) def create(self, instance_data, source_data): - # instance_data > asset, task_name, variant, family - # source_data > additional data self.log.info(f"instance_data: {instance_data}") - self.log.info(f"source_data: {source_data}") + subset_name = instance_data["subset"] + family = instance_data["family"] instance_name = "{}_{}".format( instance_data["name"], - "plateMain" + subset_name ) - return self._create_instance(instance_name, instance_data) + return self._create_instance(instance_name, family, instance_data) + + def _create_instance(self, subset_name, family, data): - def _create_instance(self, subset_name, data): # Create new instance - new_instance = CreatedInstance(self.family, subset_name, data, self) + new_instance = CreatedInstance(family, subset_name, data, self) # Host implementation of storing metadata about instance HostContext.add_instance(new_instance.data_to_store()) # Add instance to current context @@ -109,8 +103,6 @@ or updating already created. Publishing will create OTIO file. """ icon = "fa.file" - - def __init__( self, project_settings, *args, **kwargs ): @@ -132,23 +124,27 @@ or updating already created. Publishing will create OTIO file. k: v for k, v in pre_create_data.items() if k != "sequence_filepath_data" } - # TODO: create otio instance + # Create otio editorial instance asset_name = instance_data["asset"] asset_doc = get_asset_by_name(self.project_name, asset_name) + + # get asset doc data attributes fps = asset_doc["data"]["fps"] instance_data.update({ "fps": fps }) + + # get otio timeline otio_timeline = self._create_otio_instance( subset_name, instance_data, pre_create_data) - # TODO: create clip instances + # Create all clip instances clip_instance_properties.update({ "fps": fps, - "asset_name": asset_name + "parent_asset_name": asset_name }) self._get_clip_instances( - asset_name, otio_timeline, clip_instance_properties) + otio_timeline, clip_instance_properties) def _create_otio_instance(self, subset_name, data, pre_create_data): # get path of sequence @@ -181,18 +177,19 @@ or updating already created. Publishing will create OTIO file. 
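With the change above the clip creator stops hard-coding "plateMain"; the subset name travels in the instance data and follows the convention used throughout these editorial patches: family plus capitalized variant for the subset, clip name plus subset for the instance label. A tiny sketch of that convention with illustrative values:

def build_subset_name(family, variant):
    # "plate" + "main" -> "plateMain"
    return "{}{}".format(family, variant.capitalize())


def build_clip_label(clip_name, family, variant):
    return "{}_{}".format(clip_name, build_subset_name(family, variant))


assert build_subset_name("plate", "main") == "plateMain"
assert build_clip_label("sh010", "plate", "main") == "sh010_plateMain"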
def _get_clip_instances( self, - asset_name, otio_timeline, clip_instance_properties ): - parent_asset_name = clip_instance_properties["asset_name"] + family = "plate" + + # get clip instance properties + parent_asset_name = clip_instance_properties["parent_asset_name"] handle_start = clip_instance_properties["handle_start"] handle_end = clip_instance_properties["handle_end"] timeline_offset = clip_instance_properties["timeline_offset"] workfile_start_frame = clip_instance_properties["workfile_start_frame"] fps = clip_instance_properties["fps"] - assets_shared = {} self.asset_name_check = [] editorial_clip_creator = self.create_context.creators["editorialClip"] @@ -221,7 +218,7 @@ or updating already created. Publishing will create OTIO file. # basic unique asset name clip_name = os.path.splitext(clip.name)[0].lower() - name = f"{asset_name.split('_')[0]}_{clip_name}" + name = f"{parent_asset_name.split('_')[0]}_{clip_name}" # make sure the name is unique self._validate_name_uniqueness(name) @@ -251,14 +248,24 @@ or updating already created. Publishing will create OTIO file. ) frame_end = frame_start + (clip_duration - 1) + # subset name + variant = self.variant + subset_name = "{}{}".format( + family, variant.capitalize() + ) + # create shared new instance data instance_data = { - "variant": "Main", - "families": ["plate"], + "variant": variant, + "family": family, + "families": ["clip"], + "subset": subset_name, - # shared attributes + # HACK: just for temporal bug workaround + # TODO: should loockup shot name for update "asset": parent_asset_name, "name": clip_name, + # HACK: just for temporal bug workaround "task": "Compositing", # parent time properties @@ -334,6 +341,13 @@ or updating already created. Publishing will create OTIO file. label="Filepath", ), UILabelDef("Clip instance attributes"), + UISeparatorDef(), + # TODO: perhpas better would be timecode and fps input + NumberDef( + "timeline_offset", + default=900000, + label="Timeline offset" + ), UISeparatorDef() ] attr_defs.extend(CLIP_ATTR_DEFS) From 370ee0c254a1a3d6eab1e4a27f8cbf4bc5676986 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Jul 2022 15:21:37 +0200 Subject: [PATCH 226/785] trayp: added `fps` enumerator for rate override --- .../plugins/create/create_editorial.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 3164e4aa99..406a7bc3b3 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -25,6 +25,18 @@ from openpype.hosts.traypublisher.api.pipeline import HostContext CLIP_ATTR_DEFS = [ + EnumDef( + "fps", + items={ + "from_project": "From project", + 23.997: "23.976", + 24: "24", + 25: "25", + 29.97: "29.97", + 30: "30" + }, + label="FPS" + ), NumberDef( "workfile_start_frame", default=1001, @@ -128,8 +140,14 @@ or updating already created. Publishing will create OTIO file. asset_name = instance_data["asset"] asset_doc = get_asset_by_name(self.project_name, asset_name) - # get asset doc data attributes - fps = asset_doc["data"]["fps"] + self.log.info(pre_create_data["fps"]) + + if pre_create_data["fps"] == "from_project": + # get asset doc data attributes + fps = asset_doc["data"]["fps"] + else: + fps = float(pre_create_data["fps"]) + instance_data.update({ "fps": fps }) @@ -149,6 +167,10 @@ or updating already created. 
Publishing will create OTIO file. def _create_otio_instance(self, subset_name, data, pre_create_data): # get path of sequence file_path_data = pre_create_data["sequence_filepath_data"] + + if len(file_path_data["filenames"]) == 0: + raise FileExistsError("File path was not added") + file_path = os.path.join( file_path_data["directory"], file_path_data["filenames"][0]) From 3f7dfb6579394237dfbf18c488c0586b06129fa2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Jul 2022 17:43:00 +0200 Subject: [PATCH 227/785] trayp: removing task --- openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 406a7bc3b3..6c8c1abdae 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -287,8 +287,6 @@ or updating already created. Publishing will create OTIO file. # TODO: should loockup shot name for update "asset": parent_asset_name, "name": clip_name, - # HACK: just for temporal bug workaround - "task": "Compositing", # parent time properties "trackStartFrame": track_start_frame, From ccffaa38bac0b4f7a6b4bbd500864dfa99cc87be Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Jul 2022 17:43:46 +0200 Subject: [PATCH 228/785] trayp: adding label to created instance --- .../plugins/create/create_editorial.py | 19 ++++++++++++------- .../publish/collect_editorial_instances.py | 9 +++++---- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 6c8c1abdae..e47d28447b 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -72,16 +72,14 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): subset_name = instance_data["subset"] family = instance_data["family"] - instance_name = "{}_{}".format( - instance_data["name"], - subset_name - ) - return self._create_instance(instance_name, family, instance_data) + return self._create_instance(subset_name, family, instance_data) def _create_instance(self, subset_name, family, data): # Create new instance new_instance = CreatedInstance(family, subset_name, data, self) + self.log.info(f"instance_data: {pformat(new_instance.data)}") + # Host implementation of storing metadata about instance HostContext.add_instance(new_instance.data_to_store()) # Add instance to current context @@ -271,13 +269,20 @@ or updating already created. Publishing will create OTIO file. 
frame_end = frame_start + (clip_duration - 1) # subset name - variant = self.variant + variant = self.get_variant() + self.log.info( + f"__ variant: {variant}") + subset_name = "{}{}".format( family, variant.capitalize() ) - + label = "{}_{}".format( + clip_name, + subset_name + ) # create shared new instance data instance_data = { + "label": label, "variant": variant, "family": family, "families": ["clip"], diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py index 874b6101c3..6521c97774 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py @@ -1,18 +1,17 @@ import os +from pprint import pformat import pyblish.api class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): """Collect data for instances created by settings creators.""" - label = "Collect Settings Simple Instances" + label = "Collect Editorial Instances" order = pyblish.api.CollectorOrder - 0.49 hosts = ["traypublisher"] def process(self, instance): - if not instance.data.get("ediorial_creator"): - return if "families" not in instance.data: instance.data["families"] = [] @@ -20,7 +19,9 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if "representations" not in instance.data: instance.data["representations"] = [] repres = instance.data["representations"] - + self.log.debug( + pformat(dict(instance.data)) + ) creator_attributes = instance.data["creator_attributes"] filepath_item = creator_attributes["filepath"] self.log.info(filepath_item) From e77d4a11d82c212930337609158767bf0de2a142 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Jul 2022 18:03:20 +0200 Subject: [PATCH 229/785] trayp: variant rework and timecode offset default to 0 --- .../plugins/create/create_editorial.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index e47d28447b..643c8a2a84 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -160,7 +160,10 @@ or updating already created. Publishing will create OTIO file. "parent_asset_name": asset_name }) self._get_clip_instances( - otio_timeline, clip_instance_properties) + otio_timeline, + clip_instance_properties, + variant=instance_data["variant"] + ) def _create_otio_instance(self, subset_name, data, pre_create_data): # get path of sequence @@ -198,7 +201,8 @@ or updating already created. Publishing will create OTIO file. def _get_clip_instances( self, otio_timeline, - clip_instance_properties + clip_instance_properties, + variant ): family = "plate" @@ -251,6 +255,7 @@ or updating already created. Publishing will create OTIO file. self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") # add offset in case there is any + self.log.debug(f"__ timeline_offset: {timeline_offset}") if timeline_offset: clip_in += timeline_offset clip_out += timeline_offset @@ -269,7 +274,6 @@ or updating already created. Publishing will create OTIO file. frame_end = frame_start + (clip_duration - 1) # subset name - variant = self.get_variant() self.log.info( f"__ variant: {variant}") @@ -292,6 +296,7 @@ or updating already created. Publishing will create OTIO file. 
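The frame-range bookkeeping above is the heart of the shot extraction: record-side in/out points come from the OTIO parent range, an optional timeline offset shifts them, and the published shot is renumbered to the workfile start frame. The same arithmetic restated as a small standalone function; the sample numbers are illustrative, and the real code reads clip_out from end_time_inclusive() rather than deriving it from the duration:

def shot_frame_range(clip_in, clip_duration, workfile_start_frame=1001,
                     timeline_offset=0):
    # Shift the record range by the global timeline offset, if any.
    clip_in += timeline_offset
    clip_out = clip_in + clip_duration - 1

    # Renumber the shot to the workfile start frame unless it is unset.
    if workfile_start_frame is None:
        frame_start = clip_in
    else:
        frame_start = workfile_start_frame
    frame_end = frame_start + (clip_duration - 1)
    return clip_in, clip_out, frame_start, frame_end


print(shot_frame_range(86400, 48))  # (86400, 86447, 1001, 1048)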
# TODO: should loockup shot name for update "asset": parent_asset_name, "name": clip_name, + "task": "", # parent time properties "trackStartFrame": track_start_frame, @@ -370,7 +375,7 @@ or updating already created. Publishing will create OTIO file. # TODO: perhpas better would be timecode and fps input NumberDef( "timeline_offset", - default=900000, + default=0, label="Timeline offset" ), UISeparatorDef() From 74e57f9f49cdbb4812cd6d0c5348c5877f4e61e1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Jul 2022 18:15:46 +0200 Subject: [PATCH 230/785] OP-3446 - implemented render_mov_batch in TrayPublisher --- openpype/hosts/traypublisher/api/batch_lib.py | 61 ++++++ .../plugins/create/create_mov_batch.py | 191 ++++++++++++++++++ .../plugins/publish/collect_mov_batch.py | 34 ++++ .../project_settings/traypublisher.json | 15 +- .../schema_project_traypublisher.json | 71 +++++++ 5 files changed, 371 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/traypublisher/api/batch_lib.py create mode 100644 openpype/hosts/traypublisher/plugins/create/create_mov_batch.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py diff --git a/openpype/hosts/traypublisher/api/batch_lib.py b/openpype/hosts/traypublisher/api/batch_lib.py new file mode 100644 index 0000000000..2486d405bd --- /dev/null +++ b/openpype/hosts/traypublisher/api/batch_lib.py @@ -0,0 +1,61 @@ +# Helper functions to find matching asset for (multiple) processed source files +import os +import collections + +from openpype.client import get_assets + + +def get_children_assets_by_name(project_name, top_asset_doc): + """ Get all children for 'top_asset_doc' by theirs name + + Args: + project_name (str) + top_asset_doc (asset doc) (eg dict) + Returns: + (dict) {"shot1": shot1_asset_doc} + """ + assets_by_parent_id = get_asset_docs_by_parent_id(project_name) + _children_docs = get_children_docs( + assets_by_parent_id, top_asset_doc + ) + children_docs = { + children_doc["name"].lower(): children_doc + for children_doc in _children_docs + } + return children_docs + + +def get_asset_docs_by_parent_id(project_name): + """ Query all assets for project and store them by parent's id to list + + Args: + project_name (str) + Returns: + (dict) { _id of parent :[asset_doc1, asset_doc2]} + """ + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in get_assets(project_name): + parent_id = asset_doc["data"]["visualParent"] + asset_docs_by_parent_id[parent_id].append(asset_doc) + return asset_docs_by_parent_id + + +def get_children_docs(documents_by_parent_id, parent_doc): + """ Recursively find all children in reverse order + + Last children first. 
+ Args: + documents_by_parent_id (dict) + parent_doc (asset doc, eg dict) + Returns + (list) of asset docs + """ + output = [] + children = documents_by_parent_id.get(parent_doc["_id"]) or tuple() + for child in children: + output.extend( + get_children_docs(documents_by_parent_id, child) + ) + output.append(parent_doc) + return output + diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py new file mode 100644 index 0000000000..5297d73ba9 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -0,0 +1,191 @@ +import copy +import os +import re + +from openpype.client import get_assets +from openpype.hosts.traypublisher.api import pipeline +from openpype.lib import FileDef, TextDef, get_subset_name_with_asset_doc +from openpype.pipeline import ( + CreatedInstance +) + +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator + + +class BatchMovCreator(TrayPublishCreator): + """Creates instances from .mov file(s).""" + identifier = "render_mov_batch" + label = "Batch Mov" + family = "render" + description = "Publish batch of movs" + host_name = "traypublisher" + + create_allow_context_change = False + version_regex = re.compile(r"^(.+)_v([0-9]+)$") + + default_tasks = ["Compositing"] + + extensions = [".mov"] + + def __init__(self, project_settings, *args, **kwargs): + super(BatchMovCreator, self).__init__(project_settings, + *args, **kwargs) + self._default_variants = (project_settings["traypublisher"] + ["BatchMovCreator"] + ["default_variants"]) + + def get_icon(self): + return "fa.file" + + def create(self, subset_name, data, pre_create_data): + file_paths = pre_create_data.get("filepath") + if not file_paths: + return + + for file_info in file_paths: + instance_data = copy.deepcopy(data) + file_name = file_info["filenames"][0] + filepath = os.path.join(file_info["directory"], file_name) + instance_data["creator_attributes"] = {"filepath": filepath} + + asset_doc, version = self.get_asset_doc_from_file_name( + file_name, self.project_name) + + subset_name, task_name = self._get_subset_and_task( + asset_doc, data["variant"], self.project_name) + + instance_data["task"] = task_name + instance_data["asset"] = asset_doc["name"] + + # Create new instance + new_instance = CreatedInstance(self.family, subset_name, + instance_data, self) + # Host implementation of storing metadata about instance + pipeline.HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + + def get_asset_doc_from_file_name(self, source_filename, project_name): + """Try to parse out asset name from file name provided. + + Artists might provide various file name formats. 
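The version_regex pattern above is what lets the batch creator accept both plain and versioned file names. The asset lookup itself needs a project database, but the parsing step is easy to show in isolation; the file names below are made up:

import re

# Same pattern as BatchMovCreator.version_regex: "<asset>_v<version>".
version_regex = re.compile(r"^(.+)_v([0-9]+)$")


def split_asset_and_version(stem):
    # "stem" is the file name without its extension.
    match = version_regex.findall(stem)
    if match:
        asset_name, version = match[0]
        return asset_name, int(version)
    # No "_v###" suffix: treat the whole stem as the asset name.
    return stem, None


print(split_asset_and_version("chair_v001"))  # ('chair', 1)
print(split_asset_and_version("chair"))       # ('chair', None)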
+ Currently handled: + - chair.mov + - chair_v001.mov + - my_chair_to_upload.mov + """ + version = None + asset_name = os.path.splitext(source_filename)[0] + # Always first check if source filename is in assets + matching_asset_doc = self._get_asset_by_name_case_not_sensitive( + project_name, asset_name) + + if matching_asset_doc is None: + matching_asset_doc, version = ( + self._parse_with_version(project_name, asset_name)) + + if matching_asset_doc is None: + matching_asset_doc = self._parse_containing(project_name, + asset_name) + + if matching_asset_doc is None: + raise ValueError( + "Cannot guess asset name from {}".format(source_filename)) + + return matching_asset_doc, version + + def _parse_with_version(self, project_name, asset_name): + """Try to parse asset name from a file name containing version too + + Eg. 'chair_v001.mov' >> 'chair', 1 + """ + self.log.debug(( + "Asset doc by \"{}\" was not found, trying version regex." + ).format(asset_name)) + + matching_asset_doc = version_number = None + + regex_result = self.version_regex.findall(asset_name) + if regex_result: + _asset_name, _version_number = regex_result[0] + matching_asset_doc = self._get_asset_by_name_case_not_sensitive( + project_name, _asset_name) + if matching_asset_doc: + version_number = int(_version_number) + + return matching_asset_doc, version_number + + def _parse_containing(self, project_name, asset_name): + """Look if file name contains any existing asset name""" + for asset_doc in get_assets(project_name, fields=["name"]): + if asset_doc["name"].lower() in asset_name.lower(): + return get_assets(project_name, + asset_names=[asset_doc["name"]]) + + def _get_subset_and_task(self, asset_doc, variant, project_name): + """Create subset name according to standard template process""" + task_name = self._get_task_name(asset_doc) + + subset_name = get_subset_name_with_asset_doc( + self.family, + variant, + task_name, + asset_doc, + project_name + ) + + return subset_name, task_name + + def _get_task_name(self, asset_doc): + """Get applicable task from 'asset_doc' """ + available_task_names = {} + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + for task_name in asset_tasks.keys(): + available_task_names[task_name.lower()] = task_name + + task_name = None + for _task_name in self.default_tasks: + _task_name_low = _task_name.lower() + if _task_name_low in available_task_names: + task_name = available_task_names[_task_name_low] + break + + return task_name + + def get_default_variants(self): + return self._default_variants + + def get_instance_attr_defs(self): + return [] + + def get_pre_create_attr_defs(self): + # Use same attributes as for instance attributes + return [ + FileDef( + "filepath", + folders=False, + single_item=False, + extensions=self.extensions, + label="Filepath" + ) + ] + + def get_detail_description(self): + return """# Publish batch of .mov to multiple assets. + + File names must then contain only asset name, or asset name + version. + (eg. 
'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` + """ + + def _get_asset_by_name_case_not_sensitive(self, project_name, asset_name): + """Handle more cases in file names""" + asset_name = re.compile(asset_name, re.IGNORECASE) + + assets = list(get_assets(project_name, asset_names=[asset_name])) + if assets: + if len(assets) > 1: + self.log.warning("Too many records found for {}".format( + asset_name)) + return + + return assets.pop() diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py new file mode 100644 index 0000000000..2a5e356684 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -0,0 +1,34 @@ +import os + +import pyblish.api +from openpype.pipeline import OpenPypePyblishPluginMixin + + +class CollectMovBatch( + pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin +): + """Collect file url for batch mov and create representation.""" + + label = "Collect Mov Batch Files" + order = pyblish.api.CollectorOrder + + hosts = ["traypublisher"] + + def process(self, instance): + if not instance.data.get("creator_identifier") == "render_mov_batch": + return + + file_url = instance.data["creator_attributes"]["filepath"] + file_name = os.path.basename(file_url) + _, ext = os.path.splitext(file_name) + + repre = { + "name": ext[1:], + "ext": ext[1:], + "files": file_name, + "stagingDir": os.path.dirname(file_url) + } + + instance.data["representations"].append(repre) + + self.log.debug("instance.data {}".format(instance.data)) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 0b54cfd39e..6d2d32a037 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -31,5 +31,18 @@ ".aep" ] } - ] + ], + "BatchMovCreator": { + "family": "render_mov_batch", + "identifier": "", + "label": "Batch Mov", + "icon": "fa.file", + "default_variants": [], + "description": "", + "detailed_description": "", + "default_tasks": "Compositing", + "extensions": [ + ".mov" + ] + } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 55c1b7b7d7..7cb74d86a7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -78,6 +78,77 @@ } ] } + }, + { + "type": "dict", + "collapsible": true, + "key": "BatchMovCreator", + "label": "Batch Mov Creator", + "use_label_wrap": true, + "collapsible_key": true, + "children": [ + { + "type": "text", + "key": "family", + "label": "Family" + }, + { + "type": "text", + "key": "identifier", + "label": "Identifier", + "placeholder": "< Use 'Family' >", + "tooltip": "All creators must have unique identifier.\nBy default is used 'family' but if you need to have more creators with same families\nyou have to set identifier too." 
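The collector above turns each picked .mov into a single-file representation. The dictionary it builds is simple enough to restate on its own; the path below is only an example, and the keys mirror what collect_mov_batch.py fills in:

import os


def mov_representation(file_url):
    file_name = os.path.basename(file_url)
    ext = os.path.splitext(file_name)[1]
    return {
        "name": ext[1:],                         # e.g. "mov"
        "ext": ext[1:],
        "files": file_name,                      # single file, not a sequence
        "stagingDir": os.path.dirname(file_url),
    }


print(mov_representation("/projects/demo/incoming/chair_v001.mov"))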
+ }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "icon", + "label": "Icon" + }, + { + "type": "list", + "key": "default_variants", + "label": "Default variants", + "object_type": { + "type": "text" + } + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "description", + "label": "Description" + }, + { + "type": "text", + "key": "detailed_description", + "label": "Detailed Description", + "multiline": true + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "default_tasks", + "label": "Default task" + }, + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "use_label_wrap": true, + "collapsible_key": true, + "collapsed": false, + "object_type": "text" + } + ] } ] } From a7c945f51f484a848401072fd230334710f3b734 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:01:52 +0200 Subject: [PATCH 231/785] OP-3446 - fix return type --- .../traypublisher/plugins/create/create_mov_batch.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index 5297d73ba9..d796b304a3 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -2,11 +2,12 @@ import copy import os import re -from openpype.client import get_assets +from openpype.client import get_assets, get_asset_by_name from openpype.hosts.traypublisher.api import pipeline -from openpype.lib import FileDef, TextDef, get_subset_name_with_asset_doc +from openpype.lib import FileDef, get_subset_name_with_asset_doc from openpype.pipeline import ( - CreatedInstance + CreatedInstance, + CreatorError ) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator @@ -89,7 +90,7 @@ class BatchMovCreator(TrayPublishCreator): asset_name) if matching_asset_doc is None: - raise ValueError( + raise CreatorError( "Cannot guess asset name from {}".format(source_filename)) return matching_asset_doc, version @@ -119,8 +120,7 @@ class BatchMovCreator(TrayPublishCreator): """Look if file name contains any existing asset name""" for asset_doc in get_assets(project_name, fields=["name"]): if asset_doc["name"].lower() in asset_name.lower(): - return get_assets(project_name, - asset_names=[asset_doc["name"]]) + return get_asset_by_name(project_name, asset_doc["name"]) def _get_subset_and_task(self, asset_doc, variant, project_name): """Create subset name according to standard template process""" From c3384c4005132c2eb973d0bda8a191bb3fd6791c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:02:06 +0200 Subject: [PATCH 232/785] OP-3446 - remove obsolete methods --- openpype/hosts/traypublisher/api/batch_lib.py | 61 ------------------- 1 file changed, 61 deletions(-) delete mode 100644 openpype/hosts/traypublisher/api/batch_lib.py diff --git a/openpype/hosts/traypublisher/api/batch_lib.py b/openpype/hosts/traypublisher/api/batch_lib.py deleted file mode 100644 index 2486d405bd..0000000000 --- a/openpype/hosts/traypublisher/api/batch_lib.py +++ /dev/null @@ -1,61 +0,0 @@ -# Helper functions to find matching asset for (multiple) processed source files -import os -import collections - -from openpype.client import get_assets - - -def get_children_assets_by_name(project_name, top_asset_doc): - """ Get all children for 'top_asset_doc' by theirs name - - Args: - project_name (str) - top_asset_doc 
(asset doc) (eg dict) - Returns: - (dict) {"shot1": shot1_asset_doc} - """ - assets_by_parent_id = get_asset_docs_by_parent_id(project_name) - _children_docs = get_children_docs( - assets_by_parent_id, top_asset_doc - ) - children_docs = { - children_doc["name"].lower(): children_doc - for children_doc in _children_docs - } - return children_docs - - -def get_asset_docs_by_parent_id(project_name): - """ Query all assets for project and store them by parent's id to list - - Args: - project_name (str) - Returns: - (dict) { _id of parent :[asset_doc1, asset_doc2]} - """ - asset_docs_by_parent_id = collections.defaultdict(list) - for asset_doc in get_assets(project_name): - parent_id = asset_doc["data"]["visualParent"] - asset_docs_by_parent_id[parent_id].append(asset_doc) - return asset_docs_by_parent_id - - -def get_children_docs(documents_by_parent_id, parent_doc): - """ Recursively find all children in reverse order - - Last children first. - Args: - documents_by_parent_id (dict) - parent_doc (asset doc, eg dict) - Returns - (list) of asset docs - """ - output = [] - children = documents_by_parent_id.get(parent_doc["_id"]) or tuple() - for child in children: - output.extend( - get_children_docs(documents_by_parent_id, child) - ) - output.append(parent_doc) - return output - From 24e26ce63a478941dd4334c7558b92c5eb853b95 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:05:35 +0200 Subject: [PATCH 233/785] OP-3446 - fill defaults_variant explicitly No need to overwrite method --- .../traypublisher/plugins/create/create_mov_batch.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index d796b304a3..1577b622ab 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -31,9 +31,9 @@ class BatchMovCreator(TrayPublishCreator): def __init__(self, project_settings, *args, **kwargs): super(BatchMovCreator, self).__init__(project_settings, *args, **kwargs) - self._default_variants = (project_settings["traypublisher"] - ["BatchMovCreator"] - ["default_variants"]) + self.default_variants = (project_settings["traypublisher"] + ["BatchMovCreator"] + ["default_variants"]) def get_icon(self): return "fa.file" @@ -152,9 +152,6 @@ class BatchMovCreator(TrayPublishCreator): return task_name - def get_default_variants(self): - return self._default_variants - def get_instance_attr_defs(self): return [] From 89eddfa63d91eb1de766363b9d9c8c7899d39ad1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:17:55 +0200 Subject: [PATCH 234/785] OP-3446 - fix pulling configuration from Settings --- .../traypublisher/plugins/create/create_mov_batch.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index 1577b622ab..e54fc44acc 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -24,10 +24,6 @@ class BatchMovCreator(TrayPublishCreator): create_allow_context_change = False version_regex = re.compile(r"^(.+)_v([0-9]+)$") - default_tasks = ["Compositing"] - - extensions = [".mov"] - def __init__(self, project_settings, *args, **kwargs): super(BatchMovCreator, self).__init__(project_settings, 
*args, **kwargs) @@ -35,6 +31,14 @@ class BatchMovCreator(TrayPublishCreator): ["BatchMovCreator"] ["default_variants"]) + self.default_tasks = (project_settings["traypublisher"] + ["BatchMovCreator"] + ["default_tasks"]) + + self.extensions = (project_settings["traypublisher"] + ["BatchMovCreator"] + ["extensions"]) + def get_icon(self): return "fa.file" From 1fe8c96c261e9687b5aaca52e53d88897fef540a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:18:31 +0200 Subject: [PATCH 235/785] OP-3446 - modifying Settings to list --- .../settings/defaults/project_settings/traypublisher.json | 4 ++-- .../projects_schema/schema_project_traypublisher.json | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 6d2d32a037..36526d01b0 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -37,10 +37,10 @@ "identifier": "", "label": "Batch Mov", "icon": "fa.file", - "default_variants": [], + "default_variants": ["Main"], "description": "", "detailed_description": "", - "default_tasks": "Compositing", + "default_tasks": ["Compositing"], "extensions": [ ".mov" ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 7cb74d86a7..308883d46f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -135,9 +135,12 @@ "type": "separator" }, { - "type": "text", + "type": "list", "key": "default_tasks", - "label": "Default task" + "label": "Default tasks", + "object_type": { + "type": "text" + } }, { "type": "list", From 20b0292af170360db2aecb0483afe3ae13dd3bcd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 11:19:27 +0200 Subject: [PATCH 236/785] OP-3446 - removed unneeded, comes from class --- openpype/hosts/traypublisher/plugins/create/create_mov_batch.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index e54fc44acc..fdada96c87 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -19,7 +19,6 @@ class BatchMovCreator(TrayPublishCreator): label = "Batch Mov" family = "render" description = "Publish batch of movs" - host_name = "traypublisher" create_allow_context_change = False version_regex = re.compile(r"^(.+)_v([0-9]+)$") From 08afb46ab4f8cbfcbed481a2a03665c47b29a49f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 12 Jul 2022 12:19:04 +0200 Subject: [PATCH 237/785] trayp: implementing variants from settings --- .../plugins/create/create_editorial.py | 125 +++++++++++------- .../project_settings/traypublisher.json | 24 +++- .../schema_project_traypublisher.json | 26 ++++ 3 files changed, 127 insertions(+), 48 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 643c8a2a84..fdcdd74c88 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ 
-130,9 +130,12 @@ or updating already created. Publishing will create OTIO file. self.default_variants = self._creator_settings["default_variants"] def create(self, subset_name, instance_data, pre_create_data): + allowed_variants = self._get_allowed_variants(pre_create_data) + clip_instance_properties = { k: v for k, v in pre_create_data.items() if k != "sequence_filepath_data" + if k not in self._creator_settings["variants"] } # Create otio editorial instance asset_name = instance_data["asset"] @@ -162,7 +165,9 @@ or updating already created. Publishing will create OTIO file. self._get_clip_instances( otio_timeline, clip_instance_properties, - variant=instance_data["variant"] + variant_name=instance_data["variant"], + variants=allowed_variants + ) def _create_otio_instance(self, subset_name, data, pre_create_data): @@ -202,10 +207,9 @@ or updating already created. Publishing will create OTIO file. self, otio_timeline, clip_instance_properties, - variant + variant_name, + variants ): - family = "plate" - # get clip instance properties parent_asset_name = clip_instance_properties["parent_asset_name"] handle_start = clip_instance_properties["handle_start"] @@ -273,53 +277,73 @@ or updating already created. Publishing will create OTIO file. ) frame_end = frame_start + (clip_duration - 1) - # subset name - self.log.info( - f"__ variant: {variant}") + for family, _vconf in variants.items(): + self.log.debug(f"__ family: {family}") + self.log.debug(f"__ _vconf: {_vconf}") - subset_name = "{}{}".format( - family, variant.capitalize() - ) - label = "{}_{}".format( - clip_name, - subset_name - ) - # create shared new instance data - instance_data = { - "label": label, - "variant": variant, - "family": family, - "families": ["clip"], - "subset": subset_name, + families = ["clip"] - # HACK: just for temporal bug workaround - # TODO: should loockup shot name for update - "asset": parent_asset_name, - "name": clip_name, - "task": "", + # add review family if defined + if _vconf.get("review"): + families.append("review") - # parent time properties - "trackStartFrame": track_start_frame, + # subset name + subset_name = "{}{}".format( + family, variant_name.capitalize() + ) + label = "{}_{}".format( + clip_name, + subset_name + ) - # creator_attributes - "creator_attributes": { - "asset_name": clip_name, - "timeline_offset": timeline_offset, - "workfile_start_frame": workfile_start_frame, - "frameStart": frame_start, - "frameEnd": frame_end, - "fps": fps, - "handle_start": handle_start, - "handle_end": handle_end, - "clipIn": clip_in, - "clipOut": clip_out, - "sourceIn": source_in, - "sourceOut": source_out, + # create shared new instance data + instance_data = { + "label": label, + "variant": variant_name, + "family": family, + "families": families, + "subset": subset_name, + + # HACK: just for temporal bug workaround + # TODO: should loockup shot name for update + "asset": parent_asset_name, + "name": clip_name, + "task": "", + + # parent time properties + "trackStartFrame": track_start_frame, + + # allowed file ext from settings + "filterExt": _vconf["filter_ext"], + + # creator_attributes + "creator_attributes": { + "asset_name": clip_name, + "timeline_offset": timeline_offset, + "workfile_start_frame": workfile_start_frame, + "frameStart": frame_start, + "frameEnd": frame_end, + "fps": fps, + "handle_start": handle_start, + "handle_end": handle_end, + "clipIn": clip_in, + "clipOut": clip_out, + "sourceIn": source_in, + "sourceOut": source_out, + } } - } - c_instance = 
editorial_clip_creator.create(instance_data, {}) - self.log.debug(f"{pformat(dict(c_instance.data))}") + c_instance = editorial_clip_creator.create( + instance_data, {}) + self.log.debug(f"{pformat(dict(c_instance.data))}") + + def _get_allowed_variants(self, pre_create_data): + self.log.debug(f"__ pre_create_data: {pre_create_data}") + return { + key: value + for key, value in self._creator_settings["variants"].items() + if pre_create_data[key] + } def _validate_clip_for_processing(self, clip): if clip.name is None: @@ -370,15 +394,22 @@ or updating already created. Publishing will create OTIO file. allow_sequences=False, label="Filepath", ), - UILabelDef("Clip instance attributes"), - UISeparatorDef(), # TODO: perhpas better would be timecode and fps input NumberDef( "timeline_offset", default=0, label="Timeline offset" ), + UISeparatorDef(), + UILabelDef("Clip instance attributes"), UISeparatorDef() ] + # add variants swithers + attr_defs.extend( + BoolDef(_var, label=_var) + for _var in self._creator_settings["variants"] + ) + attr_defs.append(UISeparatorDef()) + attr_defs.extend(CLIP_ATTR_DEFS) return attr_defs diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index ef6dc5fec7..7f572cf1fb 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -36,7 +36,29 @@ "editorialSimple": { "default_variants": [ "Main" - ] + ], + "variants": { + "reference": { + "review": true, + "filter_ext": [ + "mov", + "mp4" + ] + }, + "plate": { + "review": false, + "filter_ext": [ + "mov", + "mp4" + ] + }, + "audio": { + "review": false, + "filter_ext": [ + "wav" + ] + } + } } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 11ae0e65a7..38597eeb97 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -103,6 +103,32 @@ "object_type": { "type": "text" } + }, + { + "type": "splitter" + }, + { + "key": "variants", + "label": "Variants", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "boolean", + "key": "review", + "label": "Review", + "default": true + }, + { + "type": "list", + "key": "filter_ext", + "label": "Allowed input file types", + "object_type": "text" + } + ] + } } ] } From 636206f9d2bab1e22337ebbd09c7db68b99e2fa2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 12:19:22 +0200 Subject: [PATCH 238/785] Update openpype/hosts/traypublisher/plugins/create/create_mov_batch.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/create/create_mov_batch.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index fdada96c87..20d3ecbd7c 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -26,17 +26,12 @@ class BatchMovCreator(TrayPublishCreator): def __init__(self, project_settings, *args, **kwargs): super(BatchMovCreator, 
self).__init__(project_settings, *args, **kwargs) - self.default_variants = (project_settings["traypublisher"] - ["BatchMovCreator"] - ["default_variants"]) - - self.default_tasks = (project_settings["traypublisher"] - ["BatchMovCreator"] - ["default_tasks"]) - - self.extensions = (project_settings["traypublisher"] - ["BatchMovCreator"] - ["extensions"]) + creator_settings = ( + project_settings["traypublisher"]["BatchMovCreator"] + ) + self.default_variants = creator_settings["default_variants"] + self.default_tasks = creator_settings["default_tasks"] + self.extensions = creator_settings["extensions"] def get_icon(self): return "fa.file" From 64d80f2d05c7639505db50a9c512d01d6e1cf224 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 12 Jul 2022 13:40:24 +0200 Subject: [PATCH 239/785] nuke: adding retime loading option to clip loader --- openpype/hosts/nuke/plugins/load/load_clip.py | 30 ++++++++++++++----- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index d177e6ba76..e9530c58c0 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -55,7 +55,8 @@ class LoadClip(plugin.NukeLoader): # option gui defaults = { - "start_at_workfile": True + "start_at_workfile": True, + "add_retime": True } options = [ @@ -63,6 +64,11 @@ class LoadClip(plugin.NukeLoader): "start_at_workfile", help="Load at workfile start frame", default=True + ), + qargparse.Boolean( + "add_retime", + help="Load with retime", + default=True ) ] @@ -88,6 +94,9 @@ class LoadClip(plugin.NukeLoader): start_at_workfile = options.get( "start_at_workfile", self.defaults["start_at_workfile"]) + add_retime = options.get( + "add_retime", self.defaults["add_retime"]) + version = context['version'] version_data = version.get("data", {}) repre_id = repre["_id"] @@ -151,7 +160,7 @@ class LoadClip(plugin.NukeLoader): data_imprint = {} for k in add_keys: if k == 'version': - data_imprint.update({k: context["version"]['name']}) + data_imprint[k] = context["version"]['name'] elif k == 'colorspace': colorspace = repre["data"].get(k) colorspace = colorspace or version_data.get(k) @@ -159,10 +168,13 @@ class LoadClip(plugin.NukeLoader): if used_colorspace: data_imprint["used_colorspace"] = used_colorspace else: - data_imprint.update( - {k: context["version"]['data'].get(k, str(None))}) + data_imprint[k] = context["version"]['data'].get( + k, str(None)) - data_imprint.update({"objectName": read_name}) + data_imprint["objectName"] = read_name + + if add_retime and version_data.get("retime", None): + data_imprint["addRetime"] = True read_node["tile_color"].setValue(int("0x4ecd25ff", 16)) @@ -174,7 +186,7 @@ class LoadClip(plugin.NukeLoader): loader=self.__class__.__name__, data=data_imprint) - if version_data.get("retime", None): + if add_retime and version_data.get("retime", None): self._make_retimes(read_node, version_data) self.set_as_member(read_node) @@ -200,6 +212,10 @@ class LoadClip(plugin.NukeLoader): start_at_workfile = bool("start at" in read_node['frame_mode'].value()) + # TODO: find `addRetime` add openpipe data + # add_retime = options.get( + # "add_retime", self.defaults["add_retime"]) + project_name = legacy_io.active_project() version_doc = get_version_by_id(project_name, representation["parent"]) @@ -286,7 +302,7 @@ class LoadClip(plugin.NukeLoader): "updated to version: {}".format(version_doc.get("name")) ) - if version_data.get("retime", None): + if add_retime 
and version_data.get("retime", None): self._make_retimes(read_node, version_data) else: self.clear_members(read_node) From dfb041d8524fe8b1cc415a5be86a49cb0148f529 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 15:06:46 +0200 Subject: [PATCH 240/785] timers manager is using client query functions --- .../modules/timers_manager/timers_manager.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 3cf1614316..3453e4bc4c 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -2,13 +2,13 @@ import os import platform +from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, ILaunchHookPaths ) from openpype.lib.events import register_event_callback -from openpype.pipeline import AvalonMongoDB from .exceptions import InvalidContextError @@ -197,22 +197,13 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): " Project: \"{}\" Asset: \"{}\" Task: \"{}\"" ).format(str(project_name), str(asset_name), str(task_name))) - dbconn = AvalonMongoDB() - dbconn.install() - dbconn.Session["AVALON_PROJECT"] = project_name - - asset_doc = dbconn.find_one( - { - "type": "asset", - "name": asset_name - }, - { - "data.tasks": True, - "data.parents": True - } + asset_doc = get_asset_by_name( + project_name, + asset_name, + fields=["_id", "name", "data.tasks", "data.parents"] ) + if not asset_doc: - dbconn.uninstall() raise InvalidContextError(( "Asset \"{}\" not found in project \"{}\"" ).format(asset_name, project_name)) @@ -220,7 +211,6 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): asset_data = asset_doc.get("data") or {} asset_tasks = asset_data.get("tasks") or {} if task_name not in asset_tasks: - dbconn.uninstall() raise InvalidContextError(( "Task \"{}\" not found on asset \"{}\" in project \"{}\"" ).format(task_name, asset_name, project_name)) @@ -238,9 +228,10 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): hierarchy_items = asset_data.get("parents") or [] hierarchy_items.append(asset_name) - dbconn.uninstall() return { "project_name": project_name, + "asset_id": str(asset_doc["_id"]), + "asset_name": asset_doc["name"], "task_name": task_name, "task_type": task_type, "hierarchy": hierarchy_items From 75285652ff7cacd999b4aa87213e7f0b52955c05 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 12 Jul 2022 15:24:00 +0200 Subject: [PATCH 241/785] trayp: reworking settings presets --- .../plugins/create/create_editorial.py | 25 +++++++++++-------- .../project_settings/traypublisher.json | 15 ++++++----- .../schema_project_traypublisher.json | 19 ++++++++++---- 3 files changed, 37 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index fdcdd74c88..c68c094218 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -56,9 +56,10 @@ CLIP_ATTR_DEFS = [ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): - identifier = "editorialClip" + identifier = "editorial_clip" family = "clip" host_name = "traypublisher" + label = "Editorial Clip" def __init__( self, project_settings, *args, **kwargs @@ -102,7 +103,7 @@ class 
EditorialSimpleCreator(TrayPublishCreator): label = "Editorial Simple" family = "editorial" - identifier = "editorialSimple" + identifier = "editorial_simple" default_variants = [ "main" ] @@ -130,12 +131,14 @@ or updating already created. Publishing will create OTIO file. self.default_variants = self._creator_settings["default_variants"] def create(self, subset_name, instance_data, pre_create_data): - allowed_variants = self._get_allowed_variants(pre_create_data) + allowed_variants = self._get_allowed_family_presets(pre_create_data) clip_instance_properties = { k: v for k, v in pre_create_data.items() if k != "sequence_filepath_data" - if k not in self._creator_settings["variants"] + if k not in [ + i["family"] for i in self._creator_settings["family_presets"] + ] } # Create otio editorial instance asset_name = instance_data["asset"] @@ -220,7 +223,7 @@ or updating already created. Publishing will create OTIO file. self.asset_name_check = [] - editorial_clip_creator = self.create_context.creators["editorialClip"] + editorial_clip_creator = self.create_context.creators["editorial_clip"] tracks = otio_timeline.each_child( descended_from_type=otio.schema.Track @@ -337,12 +340,12 @@ or updating already created. Publishing will create OTIO file. instance_data, {}) self.log.debug(f"{pformat(dict(c_instance.data))}") - def _get_allowed_variants(self, pre_create_data): + def _get_allowed_family_presets(self, pre_create_data): self.log.debug(f"__ pre_create_data: {pre_create_data}") return { - key: value - for key, value in self._creator_settings["variants"].items() - if pre_create_data[key] + preset["family"]: preset + for preset in self._creator_settings["family_presets"] + if pre_create_data[preset["family"]] } def _validate_clip_for_processing(self, clip): @@ -406,8 +409,8 @@ or updating already created. Publishing will create OTIO file. 
] # add variants swithers attr_defs.extend( - BoolDef(_var, label=_var) - for _var in self._creator_settings["variants"] + BoolDef(_var["family"], label=_var["family"]) + for _var in self._creator_settings["family_presets"] ) attr_defs.append(UISeparatorDef()) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 7f572cf1fb..2717ab6869 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -33,32 +33,35 @@ } ], "editorial_creators": { - "editorialSimple": { + "editorial_simple": { "default_variants": [ "Main" ], - "variants": { - "reference": { + "family_presets": [ + { + "family": "reference", "review": true, "filter_ext": [ "mov", "mp4" ] }, - "plate": { + { + "family": "plate", "review": false, "filter_ext": [ "mov", "mp4" ] }, - "audio": { + { + "family": "audio", "review": false, "filter_ext": [ "wav" ] } - } + ] } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 38597eeb97..4c0aaf41e7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -90,7 +90,7 @@ { "type": "dict", "collapsible": true, - "key": "editorialSimple", + "key": "editorial_simple", "label": "Editorial simple creator", "use_label_wrap": true, "collapsible_key": true, @@ -108,13 +108,22 @@ "type": "splitter" }, { - "key": "variants", - "label": "Variants", - "type": "dict-modifiable", - "highlight_content": true, + "type": "list", + "key": "family_presets", + "label": "Family presets", "object_type": { "type": "dict", "children": [ + { + "type": "enum", + "key": "family", + "label": "Family", + "enum_items": [ + {"reference": "reference"}, + {"plate": "plate"}, + {"audio": "audio"} + ] + }, { "type": "boolean", "key": "review", From 4b42e66c211a86d7e2e93fffe03b585e75103571 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 12 Jul 2022 15:55:30 +0200 Subject: [PATCH 242/785] trayp: adding `shot` instance --- .../plugins/create/create_editorial.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index c68c094218..6dbcf694cb 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -284,12 +284,6 @@ or updating already created. Publishing will create OTIO file. self.log.debug(f"__ family: {family}") self.log.debug(f"__ _vconf: {_vconf}") - families = ["clip"] - - # add review family if defined - if _vconf.get("review"): - families.append("review") - # subset name subset_name = "{}{}".format( family, variant_name.capitalize() @@ -304,7 +298,7 @@ or updating already created. Publishing will create OTIO file. "label": label, "variant": variant_name, "family": family, - "families": families, + "families": [], "subset": subset_name, # HACK: just for temporal bug workaround @@ -316,9 +310,6 @@ or updating already created. Publishing will create OTIO file. 
# parent time properties "trackStartFrame": track_start_frame, - # allowed file ext from settings - "filterExt": _vconf["filter_ext"], - # creator_attributes "creator_attributes": { "asset_name": clip_name, @@ -335,6 +326,16 @@ or updating already created. Publishing will create OTIO file. "sourceOut": source_out, } } + # add file extension filter only if it is not shot family + if family != "shot": + families = ["clip"] + # add review family if defined + if _vconf.get("review"): + families.append("review") + instance_data.update({ + "filterExt": _vconf["filter_ext"], + "families": families + }) c_instance = editorial_clip_creator.create( instance_data, {}) @@ -342,11 +343,13 @@ or updating already created. Publishing will create OTIO file. def _get_allowed_family_presets(self, pre_create_data): self.log.debug(f"__ pre_create_data: {pre_create_data}") - return { + return_dict = { preset["family"]: preset for preset in self._creator_settings["family_presets"] if pre_create_data[preset["family"]] } + return_dict["shot"] = {} + return return_dict def _validate_clip_for_processing(self, clip): if clip.name is None: From dada2f4831045370265037a8b1c01c43f4dd2f92 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:06:35 +0200 Subject: [PATCH 243/785] added function which extract project name based on project id --- .../modules/kitsu/utils/update_op_with_zou.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index de74b0c677..c39d1c5e36 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -33,6 +33,20 @@ def create_op_asset(gazu_entity: dict) -> dict: } +def get_kitsu_project_name(project_id: str): + """Get project name based on project id in kitsu. + + Args: + project_id (str): Id of project in Kitsu. + + Returns: + str: Project name which has project in Kitsu. + """ + + project = gazu.project.get_project(project_id) + return project["name"] + + def set_op_project(dbcon: AvalonMongoDB, project_id: str): """Set project context. 
@@ -40,9 +54,8 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str): dbcon (AvalonMongoDB): Connection to DB project_id (str): Project zou ID """ - project = gazu.project.get_project(project_id) - project_name = project["name"] - dbcon.Session["AVALON_PROJECT"] = project_name + + dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(project_id) def update_op_assets( From b4d11d4ae709fc65f41ec9c09cbf5e52a183677c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:07:10 +0200 Subject: [PATCH 244/785] use project name function to drop project collection --- openpype/modules/kitsu/utils/sync_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 577050c5af..677d269bca 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -7,6 +7,7 @@ from .credentials import validate_credentials from .update_op_with_zou import ( create_op_asset, set_op_project, + get_kitsu_project_name, write_project_to_op, update_op_assets, ) @@ -124,12 +125,11 @@ class Listener: def _delete_project(self, data): """Delete project.""" - project_doc = self.dbcon.find_one( - {"type": "project", "data.zou_id": data["project_id"]} - ) + + project_name = get_kitsu_project_name(data["project_id"]) # Delete project collection - self.dbcon.database[project_doc["name"]].drop() + self.dbcon.database[project_name].drop() # == Asset == From ee370cb7a315237608f3b60a6b6778d3fc900852 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:07:40 +0200 Subject: [PATCH 245/785] change project in session instead of replacing AvalonMongoDB with pymongo.Collection --- openpype/modules/kitsu/utils/sync_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 677d269bca..d93197a4bc 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -120,7 +120,7 @@ class Listener: # Write into DB if update_project: - self.dbcon = self.dbcon.database[project_name] + self.dbcon.Session["AVALON_PROJECT"] = project_name self.dbcon.bulk_write([update_project]) def _delete_project(self, data): From e2920ffdb53df5dd8d68b8800436f19edca2ff9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:08:15 +0200 Subject: [PATCH 246/785] use query functions in sync service --- openpype/modules/kitsu/utils/sync_service.py | 29 +++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index d93197a4bc..38c1176df9 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -2,6 +2,10 @@ import os import gazu +from openpype.client import ( + get_project, + get_assets +) from openpype.pipeline import AvalonMongoDB from .credentials import validate_credentials from .update_op_with_zou import ( @@ -150,7 +154,8 @@ class Listener: def _update_asset(self, data): """Update asset into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity asset = gazu.asset.get_asset(data["asset_id"]) @@ -159,7 +164,7 @@ class Listener: # Query all assets of the local 
project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[asset["project_id"]] = project_doc @@ -199,7 +204,8 @@ class Listener: def _update_episode(self, data): """Update episode into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity episode = gazu.shot.get_episode(data["episode_id"]) @@ -208,7 +214,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[episode["project_id"]] = project_doc @@ -249,7 +255,8 @@ class Listener: def _update_sequence(self, data): """Update sequence into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity sequence = gazu.shot.get_sequence(data["sequence_id"]) @@ -258,7 +265,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[sequence["project_id"]] = project_doc @@ -299,7 +306,8 @@ class Listener: def _update_shot(self, data): """Update shot into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity shot = gazu.shot.get_shot(data["shot_id"]) @@ -308,7 +316,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[shot["project_id"]] = project_doc @@ -359,10 +367,11 @@ class Listener: def _delete_task(self, data): """Delete task of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data["project_id"]) + project_name = self.dbcon.active_project() # Find asset doc - asset_docs = [doc for doc in self.dbcon.find({"type": "asset"})] + asset_docs = list(get_assets(project_name)) for doc in asset_docs: # Match task for name, task in doc["data"]["tasks"].items(): From d393d6c8956af8365afe5309780762740ff0205c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:09:30 +0200 Subject: [PATCH 247/785] a little bit more complicated way how to get asset matching zou id --- openpype/modules/kitsu/utils/sync_service.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 38c1176df9..3848eda7ae 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -343,14 +343,25 @@ class Listener: """Create new task into OP DB.""" # Get project entity 
set_op_project(self.dbcon, data["project_id"]) + project_name = self.dbcon.active_project() # Get gazu entity task = gazu.task.get_task(data["task_id"]) # Find asset doc - asset_doc = self.dbcon.find_one( - {"type": "asset", "data.zou.id": task["entity"]["id"]} + parent_name = task["entity"]["name"] + parent_zou_id = task["entity"]["id"] + asset_docs = get_assets( + project_name, + asset_names=[parent_name], + fields=["_id", "data.zou.id", "data.tasks"] ) + asset_doc = None + for _asset_doc in asset_docs: + doc_zou_id = _asset_doc.get("data", {}).get("zou", {}).get("id") + if doc_zou_id == parent_zou_id: + asset_doc = _asset_doc + break # Update asset tasks with new one asset_tasks = asset_doc["data"].get("tasks") From 20c2cedf75436e463cc6799ab1f3bcb579116abc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:09:47 +0200 Subject: [PATCH 248/785] use query functions in op to zou sync --- openpype/modules/kitsu/utils/update_zou_with_op.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index 81d421206f..b7bc418c98 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -6,6 +6,7 @@ from typing import List import gazu from pymongo import UpdateOne +from openpype.client import get_project from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -53,9 +54,7 @@ def sync_zou_from_op_project( """ # Get project doc if not provided if not project_doc: - project_doc = dbcon.database[project_name].find_one( - {"type": "project"} - ) + project_doc = get_project(project_name) # Get all entities from zou print(f"Synchronizing {project_name}...") @@ -96,7 +95,7 @@ def sync_zou_from_op_project( dbcon.Session["AVALON_PROJECT"] = project_name asset_docs = { asset_doc["_id"]: asset_doc - for asset_doc in dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) } # Create new assets From 96863fa8aed63dee0b72f6a38e532525f5b6863d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 16:10:37 +0200 Subject: [PATCH 249/785] use query functions in zou to op sync --- .../modules/kitsu/utils/update_op_with_zou.py | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index c39d1c5e36..86dee3ce65 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -10,6 +10,12 @@ from gazu.task import ( all_tasks_for_shot, ) +from openpype.client import ( + get_project, + get_assets, + get_asset_by_id, + get_asset_by_name +) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.lib import create_project @@ -85,9 +91,7 @@ def update_op_assets( if not item_doc: # Create asset op_asset = create_op_asset(item) insert_result = dbcon.insert_one(op_asset) - item_doc = dbcon.find_one( - {"type": "asset", "_id": insert_result.inserted_id} - ) + item_doc = get_asset_by_id(project_name, insert_result.inserted_id) # Update asset item_data = deepcopy(item_doc["data"]) @@ -235,7 +239,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: UpdateOne: Update instance for the project """ project_name = project["name"] - 
project_doc = dbcon.database[project_name].find_one({"type": "project"})
+    project_doc = get_project(project_name)
     if not project_doc:
         print(f"Creating project '{project_name}'")
         project_doc = create_project(project_name, project_name, dbcon=dbcon)
@@ -332,19 +336,20 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
         bulk_writes.append(write_project_to_op(project, dbcon))
 
     # Try to find project document
-    dbcon.Session["AVALON_PROJECT"] = project["name"]
-    project_doc = dbcon.find_one({"type": "project"})
+    project_name = project["name"]
+    dbcon.Session["AVALON_PROJECT"] = project_name
+    project_doc = get_project(project_name)
 
     # Query all assets of the local project
     zou_ids_and_asset_docs = {
         asset_doc["data"]["zou"]["id"]: asset_doc
-        for asset_doc in dbcon.find({"type": "asset"})
+        for asset_doc in get_assets(project_name)
         if asset_doc["data"].get("zou", {}).get("id")
     }
     zou_ids_and_asset_docs[project["id"]] = project_doc
 
     # Create entities root folders
-    project_module_settings = get_project_settings(project["name"])["kitsu"]
+    project_module_settings = get_project_settings(project_name)["kitsu"]
     for entity_type, root in project_module_settings["entities_root"].items():
         parent_folders = root.split("/")
         direct_parent_doc = None
@@ -384,7 +389,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
         zou_ids_and_asset_docs.update(
             {
                 asset_doc["data"]["zou"]["id"]: asset_doc
-                for asset_doc in dbcon.find({"type": "asset"})
+                for asset_doc in get_assets(projec_name)
                 if asset_doc["data"].get("zou")
             }
         )

From 767dc3dbaba27063289c6853c306acdda8493869 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 12 Jul 2022 16:11:26 +0200
Subject: [PATCH 250/785] a little bit more complicated queries using zou id

---
 .../modules/kitsu/utils/update_op_with_zou.py | 53 +++++++++++++++----
 1 file changed, 44 insertions(+), 9 deletions(-)

diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py
index 86dee3ce65..bf3705447c 100644
--- a/openpype/modules/kitsu/utils/update_op_with_zou.py
+++ b/openpype/modules/kitsu/utils/update_op_with_zou.py
@@ -179,14 +179,32 @@ def update_op_assets(
             )
             if visual_parent_doc_id is None:
                 # Find root folder doc
-                root_folder_doc = dbcon.find_one(
-                    {
-                        "type": "asset",
-                        "name": entity_parent_folders[-1],
-                        "data.root_of": substitute_item_type,
-                    },
-                    ["_id"],
+                root_folder_docs = get_assets(
+                    project_name,
+                    asset_name=[entity_parent_folders[-1]],
+                    fields=["_id", "data.root_of"]
                 )
+                # NOTE: Not sure why it's checking for entity type?
+                # OP3 does not support multiple assets with same names so type
+                # filtering is irrelevant. 
+                # This way mimics previous implementation:
+                # ```
+                # root_folder_doc = dbcon.find_one(
+                #     {
+                #         "type": "asset",
+                #         "name": entity_parent_folders[-1],
+                #         "data.root_of": substitute_item_type,
+                #     },
+                #     ["_id"],
+                # )
+                # ```
+                root_folder_doc = None
+                for folder_doc in root_folder_docs:
+                    root_of = folder_doc.get("data", {}).get("root_of")
+                    if root_of == substitute_item_type:
+                        root_folder_doc = folder_doc
+                        break
+
                 if root_folder_doc:
                     visual_parent_doc_id = root_folder_doc["_id"]
 
@@ -354,9 +372,26 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
         parent_folders = root.split("/")
         direct_parent_doc = None
         for i, folder in enumerate(parent_folders, 1):
-            parent_doc = dbcon.find_one(
-                {"type": "asset", "name": folder, "data.root_of": entity_type}
+            parent_doc = get_asset_by_name(
+                project_name, folder, fields=["_id", "data.root_of"]
             )
+            # NOTE: Not sure why it's checking for entity type?
+            # OP3 does not support multiple assets with same names so type
+            # filtering is irrelevant.
+            # Also, all of the entities could be queried at once using
+            # 'get_assets'.
+            # This way mimics previous implementation:
+            # ```
+            # parent_doc = dbcon.find_one(
+            #     {"type": "asset", "name": folder, "data.root_of": entity_type}
+            # )
+            # ```
+            if (
+                parent_doc
+                and parent_doc.get("data", {}).get("root_of") != entity_type
+            ):
+                parent_doc = None
+
             if not parent_doc:
                 direct_parent_doc = dbcon.insert_one(
                     {

From 498ed608ebce7d639fcc091da979307f0f21da62 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 12 Jul 2022 16:24:22 +0200
Subject: [PATCH 251/785] nuke: setting loader option defaults from settings

---
 openpype/hosts/nuke/plugins/load/load_clip.py | 36 ++++++++-------
 .../defaults/project_settings/nuke.json       |  6 ++-
 .../schemas/schema_nuke_load.json             | 46 ++++++++++++++++++-
 3 files changed, 68 insertions(+), 20 deletions(-)

diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py
index e9530c58c0..43dd5a66eb 100644
--- a/openpype/hosts/nuke/plugins/load/load_clip.py
+++ b/openpype/hosts/nuke/plugins/load/load_clip.py
@@ -54,26 +54,28 @@ class LoadClip(plugin.NukeLoader):
     script_start = int(nuke.root()["first_frame"].value())
 
     # option gui
-    defaults = {
+    options_defaults = {
         "start_at_workfile": True,
         "add_retime": True
     }
 
-    options = [
-        qargparse.Boolean(
-            "start_at_workfile",
-            help="Load at workfile start frame",
-            default=True
-        ),
-        qargparse.Boolean(
-            "add_retime",
-            help="Load with retime",
-            default=True
-        )
-    ]
-
     node_name_template = "{class_name}_{ext}"
 
+    @classmethod
+    def get_options(cls, *args):
+        return [
+            qargparse.Boolean(
+                "start_at_workfile",
+                help="Load at workfile start frame",
+                default=cls.options_defaults["start_at_workfile"]
+            ),
+            qargparse.Boolean(
+                "add_retime",
+                help="Load with retime",
+                default=cls.options_defaults["add_retime"]
+            )
+        ]
+
     @classmethod
     def get_representations(cls):
         return (
@@ -92,10 +94,10 @@ class LoadClip(plugin.NukeLoader):
         file = self.fname.replace("\\", "/")
 
         start_at_workfile = options.get(
-            "start_at_workfile", self.defaults["start_at_workfile"])
+            "start_at_workfile", self.options_defaults["start_at_workfile"])
 
         add_retime = options.get(
-            "add_retime", self.defaults["add_retime"])
+            "add_retime", self.options_defaults["add_retime"])
 
         version = context['version']
         version_data = version.get("data", {})
@@ -214,7 +216,7 @@ class LoadClip(plugin.NukeLoader):
 
         # TODO: find `addRetime` add openpipe data
         # add_retime = options.get(
-        #     "add_retime", 
self.defaults["add_retime"])
+        #     "add_retime", self.options_defaults["add_retime"])
 
         project_name = legacy_io.active_project()
         version_doc = get_version_by_id(project_name, representation["parent"])
diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json
index 6c45e2a9c1..3e29122074 100644
--- a/openpype/settings/defaults/project_settings/nuke.json
+++ b/openpype/settings/defaults/project_settings/nuke.json
@@ -287,7 +287,11 @@
         "LoadClip": {
             "enabled": true,
             "_representations": [],
-            "node_name_template": "{class_name}_{ext}"
+            "node_name_template": "{class_name}_{ext}",
+            "options_defaults": {
+                "start_at_workfile": true,
+                "add_retime": true
+            }
         }
     },
     "workfile_builder": {
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_load.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_load.json
index 5bd8337e4c..805424c632 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_load.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_load.json
@@ -11,10 +11,52 @@
             {
                 "key": "LoadImage",
                 "label": "Image Loader"
+            }
+        ]
+    },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "key": "LoadClip",
+        "label": "Clip Loader",
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
             },
             {
-                "key": "LoadClip",
-                "label": "Clip Loader"
+                "type": "list",
+                "key": "_representations",
+                "label": "Representations",
+                "object_type": "text"
+            },
+            {
+                "type": "text",
+                "key": "node_name_template",
+                "label": "Node name template"
+            },
+            {
+                "type": "splitter"
+            },
+            {
+                "type": "dict",
+                "collapsible": false,
+                "key": "options_defaults",
+                "label": "Loader option defaults",
+                "children": [
+                    {
+                        "type": "boolean",
+                        "key": "start_at_workfile",
+                        "label": "Start at workfile beginning"
+                    },
+                    {
+                        "type": "boolean",
+                        "key": "add_retime",
+                        "label": "Add retime"
+                    }
+                ]
             }
         ]
     }

From d04e95e28f9a206c4a7390a7a1c82f418736b7c9 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 12 Jul 2022 16:48:39 +0200
Subject: [PATCH 252/785] nuke: updating clip with retime options

---
 openpype/hosts/nuke/plugins/load/load_clip.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py
index 43dd5a66eb..b2dc4a52d7 100644
--- a/openpype/hosts/nuke/plugins/load/load_clip.py
+++ b/openpype/hosts/nuke/plugins/load/load_clip.py
@@ -212,11 +212,12 @@ class LoadClip(plugin.NukeLoader):
         read_node = nuke.toNode(container['objectName'])
         file = get_representation_path(representation).replace("\\", "/")
 
-        start_at_workfile = bool("start at" in read_node['frame_mode'].value())
+        start_at_workfile = "start at" in read_node['frame_mode'].value()
 
+        add_retime = [
+            key for key in read_node.knobs().keys()
+            if "addRetime" in key
+        ]
 
         project_name = legacy_io.active_project()
         version_doc = get_version_by_id(project_name, representation["parent"])

From 2988af04ffcd1a42a4ca40fe4a2dbe80f163ffff Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 12 Jul 2022 16:57:49 +0200
Subject: [PATCH 253/785] added missing import

---
 openpype/modules/kitsu/utils/update_zou_with_op.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py 
b/openpype/modules/kitsu/utils/update_zou_with_op.py index b7bc418c98..57d7094e95 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -6,7 +6,7 @@ from typing import List import gazu from pymongo import UpdateOne -from openpype.client import get_project +from openpype.client import get_project, get_assets from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials From 5e14a54d8248b7ddb3f02b778d89ca4a391f15c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:02:54 +0200 Subject: [PATCH 254/785] fix typo --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index bf3705447c..f56a131b8e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -424,7 +424,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): zou_ids_and_asset_docs.update( { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in get_assets(projec_name) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou") } ) From 9d4d8873358bcb6fde08e46817f47a09d858c68d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:13:50 +0200 Subject: [PATCH 255/785] keep mismatch target plugins in report --- openpype/pipeline/create/context.py | 10 ++++++++++ openpype/tools/publisher/control.py | 15 +++++++++------ 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 7f0341c127..ac345ea47b 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -709,6 +709,7 @@ class CreateContext: self.manual_creators = {} self.publish_discover_result = None + self.publish_plugins_mismatch_targets = [] self.publish_plugins = [] self.plugins_with_defs = [] self._attr_plugins_by_family = {} @@ -831,6 +832,7 @@ class CreateContext: discover_result = DiscoverResult() plugins_with_defs = [] plugins_by_targets = [] + plugins_mismatch_targets = [] if discover_publish_plugins: discover_result = publish_plugins_discover() publish_plugins = discover_result.plugins @@ -840,11 +842,19 @@ class CreateContext: plugins_by_targets = pyblish.logic.plugins_by_targets( publish_plugins, list(targets) ) + # Collect plugins that can have attribute definitions for plugin in publish_plugins: if OpenPypePyblishPluginMixin in inspect.getmro(plugin): plugins_with_defs.append(plugin) + plugins_mismatch_targets = [ + plugin + for plugin in publish_plugins + if plugin not in plugins_by_targets + ] + + self.publish_plugins_mismatch_targets = plugins_mismatch_targets self.publish_discover_result = discover_result self.publish_plugins = plugins_by_targets self.plugins_with_defs = plugins_with_defs diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 915fb7f32e..f692bb4000 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -154,15 +154,20 @@ class PublishReport: self._all_instances_by_id = {} self._current_context = None - def reset(self, context, publish_discover_result=None): + def reset(self, context, create_context): """Reset report and clear all data.""" - self._publish_discover_result = 
publish_discover_result + + self._publish_discover_result = create_context.publish_discover_result self._plugin_data = [] self._plugin_data_with_plugin = [] self._current_plugin_data = {} self._all_instances_by_id = {} self._current_context = context + for plugin in create_context.publish_plugins_mismatch_targets: + plugin_data = self._add_plugin_data_item(plugin) + plugin_data["skipped"] = True + def add_plugin_iter(self, plugin, context): """Add report about single iteration of plugin.""" for instance in context: @@ -205,6 +210,7 @@ class PublishReport: "name": plugin.__name__, "label": label, "order": plugin.order, + "targets": list(plugin.targets), "instances_data": [], "actions_data": [], "skipped": False, @@ -777,10 +783,7 @@ class PublisherController: # - pop the key after first collector using it would be safest option? self._publish_context.data["create_context"] = self.create_context - self._publish_report.reset( - self._publish_context, - self.create_context.publish_discover_result - ) + self._publish_report.reset(self._publish_context, self.create_context) self._publish_validation_errors = [] self._publish_current_plugin_validation_errors = None self._publish_error = None From 8a951ee60f837dbce81442574f922691707ce614 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:27:47 +0200 Subject: [PATCH 256/785] removed GlobalEvent --- openpype/lib/events.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 3762cec9f9..4a7d648a7e 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -253,13 +253,6 @@ class EventSystem(object): self._registered_callbacks.remove(callback) -class GlobalEvent(Event): - def __init__(self, topic, data=None, source=None): - event_system = GlobalEventSystem.get_global_event_system() - - super(GlobalEvent, self).__init__(topic, data, source, event_system) - - class GlobalEventSystem: _global_event_system = None @@ -276,7 +269,7 @@ class GlobalEventSystem: @classmethod def emit(cls, topic, data, source): - event = GlobalEvent(topic, data, source) + event = Event(topic, data, source, cls.get_global_event_system()) event.emit() return event From 8ab6b41db5689312c39e07426fda869d9c2cc421 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:34:51 +0200 Subject: [PATCH 257/785] simplified global event emit --- openpype/lib/events.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 4a7d648a7e..215e36bc4e 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -180,7 +180,10 @@ class Event(object): topic (str): Identifier of event. data (Any): Data specific for event. Dictionary is recommended. source (str): Identifier of source. + event_system (EventSystem): Event system in which can be event + triggered. 
""" + _data = {} def __init__(self, topic, data=None, source=None, event_system=None): @@ -269,9 +272,8 @@ class GlobalEventSystem: @classmethod def emit(cls, topic, data, source): - event = Event(topic, data, source, cls.get_global_event_system()) - event.emit() - return event + event_system = cls.get_global_event_system() + return event_system.emit(topic, data, source) def register_event_callback(topic, callback): From 804bb9b3382970088581f07f2a3492f0fa59bfe7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:48:55 +0200 Subject: [PATCH 258/785] fix group accessing --- openpype/tools/publisher/widgets/card_view_widgets.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index b6fcee7edb..fc8bb2af10 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -98,6 +98,7 @@ class GroupWidget(QtWidgets.QWidget): instances(list): List of instances in CreateContext. """ + # Store instances by id and by subset name instances_by_id = {} instances_by_subset_name = collections.defaultdict(list) @@ -142,6 +143,7 @@ class GroupWidget(QtWidgets.QWidget): class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" + selected = QtCore.Signal(str, str) # Group identifier of card # - this must be set because if send when mouse is released with card id @@ -178,6 +180,7 @@ class ContextCardWidget(CardWidget): Is not visually under group widget and is always at the top of card view. """ + def __init__(self, parent): super(ContextCardWidget, self).__init__(parent) @@ -204,13 +207,14 @@ class ContextCardWidget(CardWidget): class InstanceCardWidget(CardWidget): """Card widget representing instance.""" + active_changed = QtCore.Signal() def __init__(self, instance, group_icon, parent): super(InstanceCardWidget, self).__init__(parent) self._id = instance.id - self._group_identifier = instance.creator_label + self._group_identifier = instance.group_label self._group_icon = group_icon self.instance = instance From fde803e6ef4de6c33503e6dc8cc785ae2a9d9649 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 17:49:10 +0200 Subject: [PATCH 259/785] set line edit on comboboxes --- openpype/widgets/attribute_defs/widgets.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index b6493b80a8..63d40e2df1 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -374,6 +374,10 @@ class EnumAttrWidget(_BaseAttrDefWidget): combo_delegate = QtWidgets.QStyledItemDelegate(input_widget) input_widget.setItemDelegate(combo_delegate) + line_edit = QtWidgets.QLineEdit(input_widget) + line_edit.setReadOnly(True) + input_widget.setLineEdit(line_edit) + if self.attr_def.tooltip: input_widget.setToolTip(self.attr_def.tooltip) @@ -408,7 +412,8 @@ class EnumAttrWidget(_BaseAttrDefWidget): self._input_widget.setCurrentIndex(idx) else: - self._input_widget.lineEdit().setText("Multiselection") + line_edit = self._input_widget.lineEdit() + line_edit.setText("Multiselection") class UnknownAttrWidget(_BaseAttrDefWidget): From bfd565e29d40f0daca2ddbdb01d036d9dc6b543a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 17:57:07 +0200 Subject: [PATCH 260/785] OP-3446 - add traypublisher to host enum --- 
openpype/settings/entities/enum_entity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..03998677ce 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -169,6 +169,7 @@ class HostsEnumEntity(BaseEnumEntity): "tvpaint", "unreal", "standalonepublisher", + "traypublisher", "webpublisher" ] From 4fe4ac163e96d7a08cce31d30a8d8fdb85b35cc1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 17:57:47 +0200 Subject: [PATCH 261/785] OP-3446 - add traypublisher ftrack setting --- .../defaults/project_settings/ftrack.json | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 831c34835e..eb90778353 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -268,6 +268,49 @@ } ] }, + { + "hosts": [ + "traypublisher" + ], + "families": [], + "task_types": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] + }, + { + "hosts": [ + "traypublisher" + ], + "families": [ + "matchmove", + "shot" + ], + "task_types": [], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [] + }, + { + "hosts": [ + "traypublisher" + ], + "families": [ + "plate" + ], + "task_types": [], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [ + { + "families": [ + "clip", + "review" + ], + "add_ftrack_family": true + } + ] + }, { "hosts": [ "maya" From cec639cf7ed62ed28b37c4839752e72d0e845e9b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Jul 2022 18:04:55 +0200 Subject: [PATCH 262/785] OP-3446 - updated settings for TrayPublisher --- .../project_settings/traypublisher.json | 6 --- .../schema_project_traypublisher.json | 39 ------------------- 2 files changed, 45 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 36526d01b0..cb3d3d1d1a 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -33,13 +33,7 @@ } ], "BatchMovCreator": { - "family": "render_mov_batch", - "identifier": "", - "label": "Batch Mov", - "icon": "fa.file", "default_variants": ["Main"], - "description": "", - "detailed_description": "", "default_tasks": ["Compositing"], "extensions": [ ".mov" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 308883d46f..d4ad57767a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -87,28 +87,6 @@ "use_label_wrap": true, "collapsible_key": true, "children": [ - { - "type": "text", - "key": "family", - "label": "Family" - }, - { - "type": "text", - "key": "identifier", - "label": "Identifier", - "placeholder": "< Use 'Family' >", - "tooltip": "All creators must have unique identifier.\nBy default is used 'family' but if you need to have more creators with same families\nyou have to set identifier too." 
- }, - { - "type": "text", - "key": "label", - "label": "Label" - }, - { - "type": "text", - "key": "icon", - "label": "Icon" - }, { "type": "list", "key": "default_variants", @@ -117,23 +95,6 @@ "type": "text" } }, - { - "type": "separator" - }, - { - "type": "text", - "key": "description", - "label": "Description" - }, - { - "type": "text", - "key": "detailed_description", - "label": "Detailed Description", - "multiline": true - }, - { - "type": "separator" - }, { "type": "list", "key": "default_tasks", From a3e48b558e9a82334454ae4678a04b860bc9e6ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 12 Jul 2022 18:05:42 +0200 Subject: [PATCH 263/785] trayp: has parent on instance data --- .../plugins/create/create_editorial.py | 54 ++++++++++++------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 6dbcf694cb..d0ce7fa452 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -61,6 +61,8 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): host_name = "traypublisher" label = "Editorial Clip" + has_parent = False + def __init__( self, project_settings, *args, **kwargs ): @@ -69,6 +71,8 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): ) def create(self, instance_data, source_data): + self.has_parent = source_data.get("has_parent") + self.log.info(f"instance_data: {instance_data}") subset_name = instance_data["subset"] family = instance_data["family"] @@ -95,7 +99,8 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): label="Asset name", ) ] - attr_defs.extend(CLIP_ATTR_DEFS) + if not self.has_parent: + attr_defs.extend(CLIP_ATTR_DEFS) return attr_defs @@ -131,7 +136,8 @@ or updating already created. Publishing will create OTIO file. self.default_variants = self._creator_settings["default_variants"] def create(self, subset_name, instance_data, pre_create_data): - allowed_variants = self._get_allowed_family_presets(pre_create_data) + allowed_family_presets = self._get_allowed_family_presets( + pre_create_data) clip_instance_properties = { k: v for k, v in pre_create_data.items() @@ -169,7 +175,7 @@ or updating already created. Publishing will create OTIO file. otio_timeline, clip_instance_properties, variant_name=instance_data["variant"], - variants=allowed_variants + family_presets=allowed_family_presets ) @@ -211,7 +217,7 @@ or updating already created. Publishing will create OTIO file. otio_timeline, clip_instance_properties, variant_name, - variants + family_presets ): # get clip instance properties parent_asset_name = clip_instance_properties["parent_asset_name"] @@ -280,9 +286,12 @@ or updating already created. Publishing will create OTIO file. ) frame_end = frame_start + (clip_duration - 1) - for family, _vconf in variants.items(): + parent_instance_label = None + for _fpreset in family_presets: + source_data = {} + family = _fpreset["family"] self.log.debug(f"__ family: {family}") - self.log.debug(f"__ _vconf: {_vconf}") + self.log.debug(f"__ _fpreset: {_fpreset}") # subset name subset_name = "{}{}".format( @@ -299,6 +308,7 @@ or updating already created. Publishing will create OTIO file. "variant": variant_name, "family": family, "families": [], + "group": family.capitalize(), "subset": subset_name, # HACK: just for temporal bug workaround @@ -327,29 +337,37 @@ or updating already created. 
Publishing will create OTIO file. } } # add file extension filter only if it is not shot family - if family != "shot": + if family == "shot": + parent_instance_label = label + source_data + else: families = ["clip"] # add review family if defined - if _vconf.get("review"): + if _fpreset.get("review"): families.append("review") instance_data.update({ - "filterExt": _vconf["filter_ext"], - "families": families + "filterExt": _fpreset["filter_ext"], + "families": families, + "creator_attributes": { + "asset_name": clip_name, + "parent_instance": parent_instance_label + } }) + source_data["has_parent"] = True c_instance = editorial_clip_creator.create( - instance_data, {}) + instance_data, source_data) self.log.debug(f"{pformat(dict(c_instance.data))}") def _get_allowed_family_presets(self, pre_create_data): self.log.debug(f"__ pre_create_data: {pre_create_data}") - return_dict = { - preset["family"]: preset - for preset in self._creator_settings["family_presets"] - if pre_create_data[preset["family"]] - } - return_dict["shot"] = {} - return return_dict + return [ + {"family": "shot"}, + *[ + preset for preset in self._creator_settings["family_presets"] + if pre_create_data[preset["family"]] + ] + ] def _validate_clip_for_processing(self, clip): if clip.name is None: From 018896f9239f3fb3a036512a892a8f48593dca85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 18:28:03 +0200 Subject: [PATCH 264/785] added docstrings --- openpype/lib/events.py | 57 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 215e36bc4e..301d62e2a6 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -229,23 +229,74 @@ class Event(object): class EventSystem(object): + """Encapsulate event handling into an object. + + System wraps registered callbacks and triggered events into single object + so it is possible to create mutltiple independent systems that have their + topics and callbacks. + + + """ + def __init__(self): self._registered_callbacks = [] def add_callback(self, topic, callback): + """Register callback in event system. + + Args: + topic (str): Topic for EventCallback. + callback (Callable): Function or method that will be called + when topic is triggered. + + Returns: + EventCallback: Created callback object which can be used to + stop listening. + """ + callback = EventCallback(topic, callback) self._registered_callbacks.append(callback) return callback def create_event(self, topic, data, source): + """Create new event which is bound to event system. + + Args: + topic (str): Event topic. + data (dict): Data related to event. + source (str): Source of event. + + Returns: + Event: Object of event. + """ + return Event(topic, data, source, self) def emit(self, topic, data, source): + """Create event based on passed data and emit it. + + This is easiest way how to trigger event in an event system. + + Args: + topic (str): Event topic. + data (dict): Data related to event. + source (str): Source of event. + + Returns: + Event: Created and emitted event. + """ + event = self.create_event(topic, data, source) event.emit() return event def emit_event(self, event): + """Emit event object. + + Args: + event (Event): Prepared event with topic and data. + """ + invalid_callbacks = [] for callback in self._registered_callbacks: callback.process_event(event) @@ -257,6 +308,12 @@ class EventSystem(object): class GlobalEventSystem: + """Event system living in global scope of process. 
+ + This is primarily used in host implementation to trigger events + related to DCC changes or changes of context in the host implementation. + """ + _global_event_system = None @classmethod From e8f30eea9a8914ed09360d56bcd7b2a60d8367fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 12 Jul 2022 18:30:46 +0200 Subject: [PATCH 265/785] Added typing notation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Félix David --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index f56a131b8e..a68d6d31c3 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -39,7 +39,7 @@ def create_op_asset(gazu_entity: dict) -> dict: } -def get_kitsu_project_name(project_id: str): +def get_kitsu_project_name(project_id: str)->str: """Get project name based on project id in kitsu. Args: From 77ffca938a7cd4a3600dd8167bf4e2f346c11fd7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Jul 2022 18:53:58 +0200 Subject: [PATCH 266/785] make enum line edit transparent for mouse --- openpype/widgets/attribute_defs/widgets.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 63d40e2df1..7a7035317b 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -376,6 +376,7 @@ class EnumAttrWidget(_BaseAttrDefWidget): line_edit = QtWidgets.QLineEdit(input_widget) line_edit.setReadOnly(True) + line_edit.setAttribute(QtCore.Qt.WA_TransparentForMouseEvents) input_widget.setLineEdit(line_edit) if self.attr_def.tooltip: @@ -413,7 +414,7 @@ class EnumAttrWidget(_BaseAttrDefWidget): else: line_edit = self._input_widget.lineEdit() - line_edit.setText("Multiselection") + line_edit.setText("< Multiselection> ") class UnknownAttrWidget(_BaseAttrDefWidget): From 81469cbc54126487a1d3e35d78d36294a966ed14 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Jul 2022 23:25:44 +0200 Subject: [PATCH 267/785] implemented combobox that can have custom text --- openpype/tools/utils/__init__.py | 2 ++ openpype/tools/utils/widgets.py | 22 ++++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py index 0f367510bd..5ccc1b40b3 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -1,4 +1,5 @@ from .widgets import ( + CustomTextComboBox, PlaceholderLineEdit, BaseClickableFrame, ClickableFrame, @@ -28,6 +29,7 @@ from .overlay_messages import ( __all__ = ( + "CustomTextComboBox", "PlaceholderLineEdit", "BaseClickableFrame", "ClickableFrame", diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index d5ae909be8..df0d349822 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -11,6 +11,28 @@ from openpype.style import ( log = logging.getLogger(__name__) +class CustomTextComboBox(QtWidgets.QComboBox): + """Combobox which can have different text showed.""" + + def __init__(self, *args, **kwargs): + self._custom_text = None + super(CustomTextComboBox, self).__init__(*args, **kwargs) + + def set_custom_text(self, text=None): + if self._custom_text != text: + self._custom_text = 
text + self.repaint() + + def paintEvent(self, event): + painter = QtWidgets.QStylePainter(self) + option = QtWidgets.QStyleOptionComboBox() + self.initStyleOption(option) + if self._custom_text is not None: + option.currentText = self._custom_text + painter.drawComplexControl(QtWidgets.QStyle.CC_ComboBox, option) + painter.drawControl(QtWidgets.QStyle.CE_ComboBoxLabel, option) + + class PlaceholderLineEdit(QtWidgets.QLineEdit): """Set placeholder color of QLineEdit in Qt 5.12 and higher.""" def __init__(self, *args, **kwargs): From cc893a64b44a297f218050e816a37bd6e1d9f583 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Jul 2022 23:26:09 +0200 Subject: [PATCH 268/785] use combobox with custom text in EnumAttrWidget --- openpype/widgets/attribute_defs/widgets.py | 24 ++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 7a7035317b..e4c4aba170 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -15,6 +15,7 @@ from openpype.lib.attribute_definitions import ( UISeparatorDef, UILabelDef ) +from openpype.tools.utils import CustomTextComboBox from openpype.widgets.nice_checkbox import NiceCheckbox from .files_widget import FilesWidget @@ -369,8 +370,12 @@ class BoolAttrWidget(_BaseAttrDefWidget): class EnumAttrWidget(_BaseAttrDefWidget): + def __init__(self, *args, **kwargs): + self._multivalue = False + super(EnumAttrWidget, self).__init__(*args, **kwargs) + def _ui_init(self): - input_widget = QtWidgets.QComboBox(self) + input_widget = CustomTextComboBox(self) combo_delegate = QtWidgets.QStyledItemDelegate(input_widget) input_widget.setItemDelegate(combo_delegate) @@ -399,6 +404,9 @@ class EnumAttrWidget(_BaseAttrDefWidget): def _on_value_change(self): new_value = self.current_value() + if self._multivalue: + self._multivalue = False + self._input_widget.set_custom_text(None) self.value_changed.emit(new_value, self.attr_def.id) def current_value(self): @@ -406,15 +414,23 @@ class EnumAttrWidget(_BaseAttrDefWidget): return self._input_widget.itemData(idx) def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if len(set_value) == 1: + multivalue = False + value = tuple(set_value)[0] + if not multivalue: idx = self._input_widget.findData(value) cur_idx = self._input_widget.currentIndex() if idx != cur_idx and idx >= 0: self._input_widget.setCurrentIndex(idx) - else: - line_edit = self._input_widget.lineEdit() - line_edit.setText("< Multiselection> ") + custom_text = None + if multivalue: + custom_text = "< Multiselection >" + self._input_widget.set_custom_text(custom_text) + self._multivalue = multivalue class UnknownAttrWidget(_BaseAttrDefWidget): From 9811e8a1d5dc586e7b3e7786e03bc5c0ef2f3974 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Jul 2022 23:37:51 +0200 Subject: [PATCH 269/785] fix empty line --- openpype/tools/publisher/widgets/card_view_widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index fc8bb2af10..04df85b0fb 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -98,7 +98,7 @@ class GroupWidget(QtWidgets.QWidget): instances(list): List of instances in CreateContext. 
""" - + # Store instances by id and by subset name instances_by_id = {} instances_by_subset_name = collections.defaultdict(list) From 0222a1518d1131e31d586f47dbf4f11b1c1ebbd0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 13 Jul 2022 03:57:50 +0000 Subject: [PATCH 270/785] [Automated] Bump version --- CHANGELOG.md | 30 +++++++++++------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 13 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a0c058f73..55ee51b38a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.12.1-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.1-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...HEAD) @@ -14,6 +14,9 @@ **🚀 Enhancements** +- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) +- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) +- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) - General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) - General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) - Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) @@ -25,6 +28,11 @@ **🐛 Bug fixes** +- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) +- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) +- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) +- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) +- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) - General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) - Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) - Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) @@ -46,7 +54,9 @@ - General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) - Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) - General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) +- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) - General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) +- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) - General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) - General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) - Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) @@ -66,8 +76,6 @@ - Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) - Attribute Defs UI: Files 
widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) - General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) -- Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) -- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) **🐛 Bug fixes** @@ -82,7 +90,6 @@ - Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) - Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) - Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) -- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) **🔀 Refactored code** @@ -107,16 +114,9 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) -**🆕 New features** - -- Flame: custom export temp folder [\#3346](https://github.com/pypeclub/OpenPype/pull/3346) -- Nuke: removing third-party plugins [\#3344](https://github.com/pypeclub/OpenPype/pull/3344) - **🚀 Enhancements** - Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) -- Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) -- Kitsu: Prevent crash on missing frames information [\#3352](https://github.com/pypeclub/OpenPype/pull/3352) **🐛 Bug fixes** @@ -125,19 +125,11 @@ - Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) - Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) - AE- fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) -- deadline: fixing misidentification of revieables [\#3356](https://github.com/pypeclub/OpenPype/pull/3356) -- General: Create only one thumbnail per instance [\#3351](https://github.com/pypeclub/OpenPype/pull/3351) -- nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) -- General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) ## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.0-nightly.4...3.11.0) -**🐛 Bug fixes** - -- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) - ## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.6...3.10.0) diff --git a/openpype/version.py b/openpype/version.py index 3239b0e2a2..08bb4706cc 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.1-nightly.4" +__version__ = "3.12.1-nightly.5" diff --git a/pyproject.toml b/pyproject.toml index f5bd7cc946..1251299612 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.1-nightly.4" # OpenPype +version = "3.12.1-nightly.5" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 8bbf693a92a247894a10ef531ac433b3740ac154 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 13 Jul 2022 09:03:41 +0200 Subject: [PATCH 271/785] add unpack and pack tools --- tools/pack_project.ps1 | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 tools/pack_project.ps1 diff --git a/tools/pack_project.ps1 b/tools/pack_project.ps1 new file mode 100644 index 0000000000..36ec3cb96b --- /dev/null +++ b/tools/pack_project.ps1 @@ -0,0 +1,39 @@ +<# +.SYNOPSIS + Helper script OpenPype Packing project. + +.DESCRIPTION + Once you are happy with the project and want to preserve it for future work, just change the project name on line 38 and copy the file into .\OpenPype\tools. Then use the cmd form .EXAMPLE + +.EXAMPLE + +PS> .\tools\run_pack_project.ps1 + +#> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +$env:_INSIDE_OPENPYPE_TOOL = "1" + +# make sure Poetry is in PATH +if (-not (Test-Path 'env:POETRY_HOME')) { + $env:POETRY_HOME = "$openpype_root\.poetry" +} +$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" + +Set-Location -Path $openpype_root + +Write-Host ">>> " -NoNewline -ForegroundColor Green +Write-Host "Reading Poetry ... " -NoNewline +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { + Write-Host "NOT FOUND" -ForegroundColor Yellow + Write-Host "*** " -NoNewline -ForegroundColor Yellow + Write-Host "We need to install Poetry create virtual env first ..." + & "$openpype_root\tools\create_env.ps1" +} else { + Write-Host "OK" -ForegroundColor Green +} + +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" pack-project --project "OP02_VFX_demo" +Set-Location -Path $current_dir \ No newline at end of file From 9a3a01c9ed6afeec9015b3dd06c5ef2e2438cc60 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 13 Jul 2022 09:03:50 +0200 Subject: [PATCH 272/785] add unpack tool --- tools/unpack_project.ps1.lnk | Bin 0 -> 1426 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tools/unpack_project.ps1.lnk diff --git a/tools/unpack_project.ps1.lnk b/tools/unpack_project.ps1.lnk new file mode 100644 index 0000000000000000000000000000000000000000..56eee50ca235b0ea76eca52ed0040137a07f9786 GIT binary patch literal 1426 zcma)6Nla5w6ulruP?QphqQQWe7!+DM$QVq(Dv(G@^%ueVBh=p-3T^+VzX+HpPPi}< z7aBAqCImFPkOeN(2ohYVi6aX&xO0fcpsBdf6%x;T9cXYNuerk;{<-&^d;4mLC@w7? znrMP+svM<6%7TA?w)yjnZYplvb~woFi%o7mzn%~}7#A{8?TAmKOzZpF$*k#8w`OoE zk#hC=tLrG=Tq;Hn%{!fB;1gB&J(5aws>`pElaeTbevCibt8J2OIf4!+%G}GLS42N7 zqUUdjQINJh5!4B6SkXoqYGs)lM6m(SXzU~5Mh(IWUNBwId5JhPHPj4i0c>6^(^ zOA7FOxN8^ph|;6?mTVJt4FzP`qeky-5duJ+urt0t*ps0d=dq)*dyWFgQ^w61-)Z38 zNURmucUe?tu~wLOA2;Z%0?N8CoGsgn;0jeNQDZD9d{e_5wIfpnd`i?xjqv=4(t`XY zMATufa!xXEx)s7KLt=ItgpC$RI%ElCef3bG2DaYqbIKhmq5F7qiM-SzE07rBRpc~? 
z?LNgJwKgh#xy2;~bc%{AY(Z!piP7SS|Bx!i5F#$>64_)%Yltz8E5Ik8ApZ@ehW+YY?o$ z$8!ypV>QL*(b?4(H(&9-bYq=AS($M1AOhY51Ee7QHn3R%#E4g-kYBZg?-fxItRC-1 z5rmPTMaQw1q!_ajoYiwAJ#6SLBdms!7+K@pIozvM->5FvUEAJyegoE Date: Wed, 13 Jul 2022 09:19:34 +0200 Subject: [PATCH 273/785] fix forgotten variables --- openpype/lib/project_backpack.py | 2 +- tools/pack_project.ps1 | 2 +- tools/unpack_project.ps1 | 39 +++++++++++++++++++++++++++++++ tools/unpack_project.ps1.lnk | Bin 1426 -> 0 bytes 4 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 tools/unpack_project.ps1 delete mode 100644 tools/unpack_project.ps1.lnk diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py index f0188e6765..ff2f1d4b88 100644 --- a/openpype/lib/project_backpack.py +++ b/openpype/lib/project_backpack.py @@ -53,7 +53,7 @@ def pack_project(project_name, destination_dir=None): Args: project_name(str): Project that should be packaged. - destination_dir(str): Optinal path where zip will be stored. Project's + destination_dir(str): Optional path where zip will be stored. Project's root is used if not passed. """ print("Creating package of project \"{}\"".format(project_name)) diff --git a/tools/pack_project.ps1 b/tools/pack_project.ps1 index 36ec3cb96b..856247f7ca 100644 --- a/tools/pack_project.ps1 +++ b/tools/pack_project.ps1 @@ -35,5 +35,5 @@ if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "OK" -ForegroundColor Green } -& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" pack-project --project "OP02_VFX_demo" +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" pack-project --project $ARGS Set-Location -Path $current_dir \ No newline at end of file diff --git a/tools/unpack_project.ps1 b/tools/unpack_project.ps1 new file mode 100644 index 0000000000..e7b9e87a7f --- /dev/null +++ b/tools/unpack_project.ps1 @@ -0,0 +1,39 @@ +<# +.SYNOPSIS + Helper script OpenPype Unpacking project. + +.DESCRIPTION + Make sure you had dropped the project from your db and removed the poject data in case you were having them previously. Then on line 38 change the to any path where the zip with project is - usually we are having it here https://drive.google.com/drive/u/0/folders/0AKE4mxImOsAGUk9PVA . Copy the file into .\OpenPype\tools. Then use the cmd form .EXAMPLE + +.EXAMPLE + +PS> .\tools\run_unpack_project.ps1 + +#> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +$env:_INSIDE_OPENPYPE_TOOL = "1" + +# make sure Poetry is in PATH +if (-not (Test-Path 'env:POETRY_HOME')) { + $env:POETRY_HOME = "$openpype_root\.poetry" +} +$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" + +Set-Location -Path $openpype_root + +Write-Host ">>> " -NoNewline -ForegroundColor Green +Write-Host "Reading Poetry ... " -NoNewline +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { + Write-Host "NOT FOUND" -ForegroundColor Yellow + Write-Host "*** " -NoNewline -ForegroundColor Yellow + Write-Host "We need to install Poetry create virtual env first ..." 
+ & "$openpype_root\tools\create_env.ps1" +} else { + Write-Host "OK" -ForegroundColor Green +} + +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" unpack-project --zipfile $ARGS +Set-Location -Path $current_dir \ No newline at end of file diff --git a/tools/unpack_project.ps1.lnk b/tools/unpack_project.ps1.lnk deleted file mode 100644 index 56eee50ca235b0ea76eca52ed0040137a07f9786..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1426 zcma)6Nla5w6ulruP?QphqQQWe7!+DM$QVq(Dv(G@^%ueVBh=p-3T^+VzX+HpPPi}< z7aBAqCImFPkOeN(2ohYVi6aX&xO0fcpsBdf6%x;T9cXYNuerk;{<-&^d;4mLC@w7? znrMP+svM<6%7TA?w)yjnZYplvb~woFi%o7mzn%~}7#A{8?TAmKOzZpF$*k#8w`OoE zk#hC=tLrG=Tq;Hn%{!fB;1gB&J(5aws>`pElaeTbevCibt8J2OIf4!+%G}GLS42N7 zqUUdjQINJh5!4B6SkXoqYGs)lM6m(SXzU~5Mh(IWUNBwId5JhPHPj4i0c>6^(^ zOA7FOxN8^ph|;6?mTVJt4FzP`qeky-5duJ+urt0t*ps0d=dq)*dyWFgQ^w61-)Z38 zNURmucUe?tu~wLOA2;Z%0?N8CoGsgn;0jeNQDZD9d{e_5wIfpnd`i?xjqv=4(t`XY zMATufa!xXEx)s7KLt=ItgpC$RI%ElCef3bG2DaYqbIKhmq5F7qiM-SzE07rBRpc~? z?LNgJwKgh#xy2;~bc%{AY(Z!piP7SS|Bx!i5F#$>64_)%Yltz8E5Ik8ApZ@ehW+YY?o$ z$8!ypV>QL*(b?4(H(&9-bYq=AS($M1AOhY51Ee7QHn3R%#E4g-kYBZg?-fxItRC-1 z5rmPTMaQw1q!_ajoYiwAJ#6SLBdms!7+K@pIozvM->5FvUEAJyegoE Date: Wed, 13 Jul 2022 07:26:22 +0000 Subject: [PATCH 274/785] [Automated] Bump version --- CHANGELOG.md | 15 +++++---------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 7 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 55ee51b38a..59c51396e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,9 @@ # Changelog -## [3.12.1-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.1-nightly.6](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...HEAD) -### 📖 Documentation - -- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) - **🆕 New features** - Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) @@ -24,7 +20,7 @@ - Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) - Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) - Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) -- Maya: Ability to set resolution for playblasts from asset, and override through review instance. [\#3360](https://github.com/pypeclub/OpenPype/pull/3360) +- Maya: Add additional playblast options to review Extractor. 
[\#3384](https://github.com/pypeclub/OpenPype/pull/3384) **🐛 Bug fixes** @@ -40,7 +36,6 @@ - Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) - General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) - Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) -- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) - Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) - LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) - Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) @@ -59,8 +54,8 @@ - General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) - General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) - General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) -- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) - Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) @@ -68,6 +63,7 @@ ### 📖 Documentation +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) - Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) - Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) @@ -100,9 +96,9 @@ - Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) - Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) - Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) - Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) - Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) -- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) - AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) **Merged pull requests:** @@ -124,7 +120,6 @@ - Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) - Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) - Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) -- AE- fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) ## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) diff --git a/openpype/version.py b/openpype/version.py index 08bb4706cc..26c7e4fa34 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.1-nightly.5" +__version__ = "3.12.1-nightly.6" diff --git a/pyproject.toml b/pyproject.toml index 1251299612..ed7799a7ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 
@@ [tool.poetry] name = "OpenPype" -version = "3.12.1-nightly.5" # OpenPype +version = "3.12.1-nightly.6" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From f9b23a27d20b07b549fcf43277bfc796d73e6da8 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 13 Jul 2022 07:43:12 +0000 Subject: [PATCH 275/785] [Automated] Release --- CHANGELOG.md | 16 ++++++++++------ openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59c51396e3..cc5bf39a29 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,12 @@ # Changelog -## [3.12.1-nightly.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) + +### 📖 Documentation + +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) **🆕 New features** @@ -36,11 +40,13 @@ - Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) - General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) - Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) +- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) - Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) - LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) - Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) - Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) - Maya: Handle excluding `model` family from frame range validator. 
[\#3370](https://github.com/pypeclub/OpenPype/pull/3370) +- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) **🔀 Refactored code** @@ -54,8 +60,8 @@ - General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) - General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) - General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) - Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) -- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) @@ -63,7 +69,6 @@ ### 📖 Documentation -- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) - Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) - Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) @@ -96,9 +101,9 @@ - Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) - Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) - Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) -- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) - Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) - Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) - AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) **Merged pull requests:** @@ -117,7 +122,6 @@ **🐛 Bug fixes** - Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) -- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) - Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) - Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) diff --git a/openpype/version.py b/openpype/version.py index 26c7e4fa34..c7b0de0381 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.1-nightly.6" +__version__ = "3.12.1" diff --git a/pyproject.toml b/pyproject.toml index ed7799a7ba..4bdaaab4ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.1-nightly.6" # OpenPype +version = "3.12.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From eae292edc856be2e81ff78787929835c5f7fd91c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Jul 2022 10:10:31 +0200 Subject: [PATCH 276/785] trayp: adding selection rather then project --- .../hosts/traypublisher/plugins/create/create_editorial.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index d0ce7fa452..afb1368bef 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -28,7 +28,7 @@ CLIP_ATTR_DEFS = [ EnumDef( "fps", items={ - "from_project": "From project", + "from_selection": "From selection", 23.997: "23.976", 24: "24", 25: "25", @@ -152,7 +152,7 @@ or updating already created. Publishing will create OTIO file. self.log.info(pre_create_data["fps"]) - if pre_create_data["fps"] == "from_project": + if pre_create_data["fps"] == "from_selection": # get asset doc data attributes fps = asset_doc["data"]["fps"] else: @@ -339,7 +339,6 @@ or updating already created. Publishing will create OTIO file. # add file extension filter only if it is not shot family if family == "shot": parent_instance_label = label - source_data else: families = ["clip"] # add review family if defined From c9e714a7afc37f98c9cb4792fa42b53b1b2272c3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:00:21 +0200 Subject: [PATCH 277/785] implemented tray publisher host using HostBase --- openpype/hosts/traypublisher/api/__init__.py | 16 +----- openpype/hosts/traypublisher/api/pipeline.py | 60 ++++++++++---------- openpype/tools/traypublisher/window.py | 13 +++-- 3 files changed, 41 insertions(+), 48 deletions(-) diff --git a/openpype/hosts/traypublisher/api/__init__.py b/openpype/hosts/traypublisher/api/__init__.py index c461c0c526..4e7284b09a 100644 --- a/openpype/hosts/traypublisher/api/__init__.py +++ b/openpype/hosts/traypublisher/api/__init__.py @@ -1,20 +1,8 @@ from .pipeline import ( - install, - ls, - - set_project_name, - get_context_title, - get_context_data, - update_context_data, + TrayPublisherHost, ) __all__ = ( - "install", - "ls", - - "set_project_name", - "get_context_title", - "get_context_data", - "update_context_data", + "TrayPublisherHost", ) diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py index 954a0bae47..2d9db7801e 100644 --- a/openpype/hosts/traypublisher/api/pipeline.py +++ b/openpype/hosts/traypublisher/api/pipeline.py @@ -9,6 +9,8 @@ from openpype.pipeline import ( register_creator_plugin_path, legacy_io, ) +from openpype.host import HostBase, INewPublisher + ROOT_DIR = os.path.dirname(os.path.dirname( os.path.abspath(__file__) @@ -17,6 +19,35 @@ PUBLISH_PATH = os.path.join(ROOT_DIR, "plugins", "publish") CREATE_PATH = os.path.join(ROOT_DIR, "plugins", "create") +class TrayPublisherHost(HostBase, INewPublisher): + name = "traypublisher" + + def install(self): + os.environ["AVALON_APP"] = self.name + legacy_io.Session["AVALON_APP"] = self.name + + pyblish.api.register_host("traypublisher") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_creator_plugin_path(CREATE_PATH) + + def get_context_title(self): + return HostContext.get_project_name() + + def get_context_data(self): + return HostContext.get_context_data() + + def update_context_data(self, data, changes): + HostContext.save_context_data(data, 
changes) + + def set_project_name(self, project_name): + # TODO Deregister project specific plugins and register new project + # plugins + os.environ["AVALON_PROJECT"] = project_name + legacy_io.Session["AVALON_PROJECT"] = project_name + legacy_io.install() + HostContext.set_project_name(project_name) + + class HostContext: _context_json_path = None @@ -150,32 +181,3 @@ def get_context_data(): def update_context_data(data, changes): HostContext.save_context_data(data) - - -def get_context_title(): - return HostContext.get_project_name() - - -def ls(): - """Probably will never return loaded containers.""" - return [] - - -def install(): - """This is called before a project is known. - - Project is defined with 'set_project_name'. - """ - os.environ["AVALON_APP"] = "traypublisher" - - pyblish.api.register_host("traypublisher") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_creator_plugin_path(CREATE_PATH) - - -def set_project_name(project_name): - # TODO Deregister project specific plugins and register new project plugins - os.environ["AVALON_PROJECT"] = project_name - legacy_io.Session["AVALON_PROJECT"] = project_name - legacy_io.install() - HostContext.set_project_name(project_name) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 5934c4aa8a..cc33287091 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -12,9 +12,7 @@ from openpype.pipeline import ( install_host, AvalonMongoDB, ) -from openpype.hosts.traypublisher import ( - api as traypublisher -) +from openpype.hosts.traypublisher.api import TrayPublisherHost from openpype.tools.publisher import PublisherWindow from openpype.tools.utils.constants import PROJECT_NAME_ROLE from openpype.tools.utils.models import ( @@ -111,9 +109,13 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): if project_name: self._set_project(project_name) + @property + def host(self): + return self._publisher_window.controller.host + def _set_project(self, project_name): self._project_name = project_name - traypublisher.set_project_name(project_name) + self.host.set_project_name(project_name) self.setVisible(False) self.project_selected.emit(project_name) @@ -190,7 +192,8 @@ class TrayPublishWindow(PublisherWindow): def main(): - install_host(traypublisher) + host = TrayPublisherHost() + install_host(host) app = QtWidgets.QApplication([]) window = TrayPublishWindow() window.show() From 74cd74f053023324a9a6c46e67cfd5023147cda6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:00:29 +0200 Subject: [PATCH 278/785] creatos have access to host --- openpype/pipeline/create/creator_plugins.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 91b9d80234..52c76db5ef 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -102,6 +102,10 @@ class BaseCreator: return self.create_context.project_name + @property + def host(self): + return self.create_context.host + def get_group_label(self): """Group label under which are instances grouped in UI. 
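Taken together, the two patches above mean a creator plugin no longer needs to import the tray publisher module directly: the installed host object is reachable as `self.host` (backed by `self.create_context.host`), and the host itself is set up once through `install_host`. A minimal sketch of that flow, assuming the entry point added in `openpype/tools/traypublisher/window.py`; the project name below is purely illustrative and not part of the patches:

    from openpype.pipeline import install_host
    from openpype.hosts.traypublisher.api import TrayPublisherHost

    # Install the tray publisher host once, as the Qt entry point does.
    host = TrayPublisherHost()
    install_host(host)

    # Project selection now goes through the host object rather than
    # module-level functions ("demo_project" is a placeholder name).
    host.set_project_name("demo_project")

    # Inside a Creator subclass the same object is then available without
    # any host-specific import, e.g.:
    #     context_data = self.host.get_context_data()
    #     self.host.update_context_data(context_data, changes={})
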
From 2fbe33750df3c25b1089208bf093326720fec21e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:01:55 +0200 Subject: [PATCH 279/785] implemented helper method to store new instance --- openpype/hosts/traypublisher/api/plugin.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 202664cfc6..cc93d7c157 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -37,6 +37,21 @@ class TrayPublishCreator(Creator): # Use same attributes as for instance attrobites return self.get_instance_attr_defs() + def _store_new_instance(self, new_instance): + """Tray publisher specific method to store instance. + + Instance is stored into "workfile" of traypublisher and also add it + to CreateContext. + + Args: + new_instance (CreatedInstance): Instance that should be stored. + """ + + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + class SettingsCreator(TrayPublishCreator): create_allow_context_change = True @@ -58,10 +73,8 @@ class SettingsCreator(TrayPublishCreator): data["settings_creator"] = True # Create new instance new_instance = CreatedInstance(self.family, subset_name, data, self) - # Host implementation of storing metadata about instance - HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) + + self._store_new_instance(new_instance) def get_instance_attr_defs(self): return [ From feeee29660b33d343459aa4f835b48c0c306a670 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Jul 2022 12:24:56 +0200 Subject: [PATCH 280/785] trayp: adding variant to presets, also renaming `reference` family to `review` --- .../settings/defaults/project_settings/traypublisher.json | 5 ++++- .../projects_schema/schema_project_traypublisher.json | 8 +++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 2717ab6869..13939a87bc 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -39,7 +39,8 @@ ], "family_presets": [ { - "family": "reference", + "family": "review", + "variant": "Reference", "review": true, "filter_ext": [ "mov", @@ -48,6 +49,7 @@ }, { "family": "plate", + "variant": "", "review": false, "filter_ext": [ "mov", @@ -56,6 +58,7 @@ }, { "family": "audio", + "variant": "", "review": false, "filter_ext": [ "wav" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 4c0aaf41e7..8f1caceb49 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -119,11 +119,17 @@ "key": "family", "label": "Family", "enum_items": [ - {"reference": "reference"}, + {"review": "review"}, {"plate": "plate"}, {"audio": "audio"} ] }, + { + "type": "text", + "key": "variant", + "label": "Variant", + "placeholder": "< Inherited >" + }, { "type": "boolean", "key": "review", From 
43fa5f55cb904def429fa87170814cb86738f908 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Jul 2022 12:25:22 +0200 Subject: [PATCH 281/785] trayp: adding audio to review families --- .../traypublisher/plugins/publish/collect_review_family.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py b/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py index 965e251527..54ba12c66c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py @@ -16,7 +16,8 @@ class CollectReviewFamily( "image", "render", "plate", - "review" + "review", + "audio" ] def process(self, instance): From f94b8e9e3db90ded2d09cade25c2665fe9a4c255 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Jul 2022 12:25:50 +0200 Subject: [PATCH 282/785] trayp: editorial creators swarming --- .../plugins/create/create_editorial.py | 131 ++++++++++++------ 1 file changed, 90 insertions(+), 41 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index afb1368bef..f373d2ac7a 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -55,34 +55,26 @@ CLIP_ATTR_DEFS = [ ] -class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): - identifier = "editorial_clip" - family = "clip" +class EditorialClipInstanceCreatorBase(InvisibleTrayPublishCreator): host_name = "traypublisher" - label = "Editorial Clip" - - has_parent = False def __init__( self, project_settings, *args, **kwargs ): - super(EditorialClipInstanceCreator, self).__init__( + super(EditorialClipInstanceCreatorBase, self).__init__( project_settings, *args, **kwargs ) - def create(self, instance_data, source_data): - self.has_parent = source_data.get("has_parent") - + def create(self, instance_data, source_data=None): self.log.info(f"instance_data: {instance_data}") subset_name = instance_data["subset"] - family = instance_data["family"] - return self._create_instance(subset_name, family, instance_data) + return self._create_instance(subset_name, instance_data) - def _create_instance(self, subset_name, family, data): + def _create_instance(self, subset_name, data): # Create new instance - new_instance = CreatedInstance(family, subset_name, data, self) + new_instance = CreatedInstance(self.family, subset_name, data, self) self.log.info(f"instance_data: {pformat(new_instance.data)}") # Host implementation of storing metadata about instance @@ -92,6 +84,19 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): return new_instance + +class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): + identifier = "editorial_shot" + family = "shot" + label = "Editorial Shot" + + def __init__( + self, project_settings, *args, **kwargs + ): + super(EditorialShotInstanceCreator, self).__init__( + project_settings, *args, **kwargs + ) + def get_instance_attr_defs(self): attr_defs = [ TextDef( @@ -99,11 +104,49 @@ class EditorialClipInstanceCreator(InvisibleTrayPublishCreator): label="Asset name", ) ] - if not self.has_parent: - attr_defs.extend(CLIP_ATTR_DEFS) + attr_defs.extend(CLIP_ATTR_DEFS) return attr_defs +class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): + identifier = "editorial_plate" + family = "plate" + label = "Editorial Plate" + + 
def __init__( + self, project_settings, *args, **kwargs + ): + super(EditorialPlateInstanceCreator, self).__init__( + project_settings, *args, **kwargs + ) + + +class EditorialAudioInstanceCreator(EditorialClipInstanceCreatorBase): + identifier = "editorial_audio" + family = "audio" + label = "Editorial Audio" + + def __init__( + self, project_settings, *args, **kwargs + ): + super(EditorialAudioInstanceCreator, self).__init__( + project_settings, *args, **kwargs + ) + + +class EditorialReviewInstanceCreator(EditorialClipInstanceCreatorBase): + identifier = "editorial_review" + family = "review" + label = "Editorial Review" + + def __init__( + self, project_settings, *args, **kwargs + ): + super(EditorialReviewInstanceCreator, self).__init__( + project_settings, *args, **kwargs + ) + + class EditorialSimpleCreator(TrayPublishCreator): label = "Editorial Simple" @@ -229,8 +272,6 @@ or updating already created. Publishing will create OTIO file. self.asset_name_check = [] - editorial_clip_creator = self.create_context.creators["editorial_clip"] - tracks = otio_timeline.each_child( descended_from_type=otio.schema.Track ) @@ -287,15 +328,17 @@ or updating already created. Publishing will create OTIO file. frame_end = frame_start + (clip_duration - 1) parent_instance_label = None + parent_instance_id = None for _fpreset in family_presets: - source_data = {} + # get variant name from preset or from inharitance + _variant_name = _fpreset.get("variant") or variant_name family = _fpreset["family"] self.log.debug(f"__ family: {family}") self.log.debug(f"__ _fpreset: {_fpreset}") # subset name subset_name = "{}{}".format( - family, variant_name.capitalize() + family, _variant_name.capitalize() ) label = "{}_{}".format( clip_name, @@ -305,10 +348,8 @@ or updating already created. Publishing will create OTIO file. # create shared new instance data instance_data = { "label": label, - "variant": variant_name, + "variant": _variant_name, "family": family, - "families": [], - "group": family.capitalize(), "subset": subset_name, # HACK: just for temporal bug workaround @@ -319,43 +360,51 @@ or updating already created. Publishing will create OTIO file. 
# parent time properties "trackStartFrame": track_start_frame, + "timelineOffset": timeline_offset, # creator_attributes "creator_attributes": { "asset_name": clip_name, - "timeline_offset": timeline_offset, "workfile_start_frame": workfile_start_frame, - "frameStart": frame_start, - "frameEnd": frame_end, + "frameStart": int(frame_start), + "frameEnd": int(frame_end), "fps": fps, - "handle_start": handle_start, - "handle_end": handle_end, - "clipIn": clip_in, - "clipOut": clip_out, - "sourceIn": source_in, - "sourceOut": source_out, + "handle_start": int(handle_start), + "handle_end": int(handle_end), + "clipIn": int(clip_in), + "clipOut": int(clip_out), + "sourceIn": int(source_in), + "sourceOut": int(source_out), } } # add file extension filter only if it is not shot family if family == "shot": + c_instance = self.create_context.creators[ + "editorial_shot"].create( + instance_data) parent_instance_label = label + parent_instance_id = c_instance.data["instance_id"] else: - families = ["clip"] # add review family if defined - if _fpreset.get("review"): - families.append("review") instance_data.update({ "filterExt": _fpreset["filter_ext"], - "families": families, + "parent_instance_id": parent_instance_id, "creator_attributes": { - "asset_name": clip_name, "parent_instance": parent_instance_label + }, + "publish_attributes": { + "CollectReviewFamily": { + "add_review_family": _fpreset.get("review") + } } }) - source_data["has_parent"] = True - c_instance = editorial_clip_creator.create( - instance_data, source_data) + creator_identifier = f"editorial_{family}" + editorial_clip_creator = self.create_context.creators[ + creator_identifier] + c_instance = editorial_clip_creator.create( + instance_data) + self.log.debug(f"{pformat(dict(c_instance.data))}") def _get_allowed_family_presets(self, pre_create_data): @@ -435,4 +484,4 @@ or updating already created. Publishing will create OTIO file. 
attr_defs.append(UISeparatorDef()) attr_defs.extend(CLIP_ATTR_DEFS) - return attr_defs + return attr_defs \ No newline at end of file From 9d6eb0d5c2efdbfbee8b40aac3e19cef87f9973e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:30:09 +0200 Subject: [PATCH 283/785] added extract to file action to project list context actions --- openpype/tools/settings/settings/categories.py | 3 +++ openpype/tools/settings/settings/widgets.py | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index 764f42f1a3..f42027d9e2 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -854,6 +854,9 @@ class ProjectWidget(SettingsCategoryWidget): project_list_widget.version_change_requested.connect( self._on_source_version_change ) + project_list_widget.extract_to_file_requested.connect( + self._on_extract_to_file + ) self.project_list_widget = project_list_widget diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 45c21d5685..1d94094897 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1008,6 +1008,7 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): class ProjectListWidget(QtWidgets.QWidget): project_changed = QtCore.Signal() version_change_requested = QtCore.Signal(str) + extract_to_file_requested = QtCore.Signal() def __init__(self, parent, only_active=False): self._parent = parent @@ -1099,6 +1100,10 @@ class ProjectListWidget(QtWidgets.QWidget): self.version_change_requested ) submenu.addAction(action) + + extract_action = QtWidgets.QAction("Extract to file", menu) + extract_action.triggered.connect(self.extract_to_file_requested) + menu.addMenu(submenu) menu.exec_(QtGui.QCursor.pos()) From 4e137c1f4bd88ad35b4df733a72f26d590c80284 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:33:31 +0200 Subject: [PATCH 284/785] add action to menu --- openpype/tools/settings/settings/widgets.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 1d94094897..88d923c16a 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1105,6 +1105,7 @@ class ProjectListWidget(QtWidgets.QWidget): extract_action.triggered.connect(self.extract_to_file_requested) menu.addMenu(submenu) + menu.addAction(extract_action) menu.exec_(QtGui.QCursor.pos()) def on_item_clicked(self, new_index): From f6129b5f5b84d03816e17cfd6489df4d4d300116 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 12:41:15 +0200 Subject: [PATCH 285/785] trigger 'openpype.project.structure.created' topic on finish of create project structure action --- .../event_handlers_user/action_create_project_structure.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index ebea8872f9..df914de854 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -84,6 +84,11 @@ class CreateProjectFolders(BaseAction): create_project_folders(basic_paths, project_name) self.create_ftrack_entities(basic_paths, project_entity) + 
self.trigger_event( + "openpype.project.structure.created", + {"project_name": project_name} + ) + except Exception as exc: self.log.warning("Creating of structure crashed.", exc_info=True) session.rollback() From 927674be1afa804225b1a407b02de0b5ff5146e9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Jul 2022 15:11:58 +0200 Subject: [PATCH 286/785] OP-3446 - store source file as a 'source' This will be stored in DB in version. Potentially it could be used to populate Ftrack Note. --- .../hosts/traypublisher/plugins/publish/collect_mov_batch.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py index 2a5e356684..c81d1f77a5 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -31,4 +31,6 @@ class CollectMovBatch( instance.data["representations"].append(repre) + instance.data["source"] = file_url + self.log.debug("instance.data {}".format(instance.data)) From 450a20dcca1b74d01c23beebdd170b356a8ccb34 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Jul 2022 15:28:01 +0200 Subject: [PATCH 287/785] OP-3481 - add source key to Note formatting Allows to fill value from instance.data["source"] as a {source} in Ftrack Note. --- openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 952b21546d..77a7ebdfcf 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -116,6 +116,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): "app_name": app_name, "app_label": app_label, "published_paths": "
".join(sorted(published_paths)), + "source": instance.data.get("source", '') } comment = template.format(**format_data) if not comment: From 5d0e68385e13f8f69397a8f78c4c179307ffe1fe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Jul 2022 15:28:15 +0200 Subject: [PATCH 288/785] OP-3481 - add source key to Note formatting Allows to fill value from instance.data["source"] as a {source} in Ftrack Note. --- .../entities/schemas/projects_schema/schema_project_ftrack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index f8f9d5093d..c0069dcdab 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -822,7 +822,7 @@ }, { "type": "label", - "label": "Template may contain formatting keys intent, comment, host_name, app_name, app_label and published_paths." + "label": "Template may contain formatting keys intent, comment, host_name, app_name, app_label, published_paths and source." }, { "type": "text", From 0047ca458c3e6febe9b9826fd8938bea4c05339d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Jul 2022 15:30:10 +0200 Subject: [PATCH 289/785] OP-3481 - simple creators fill 'source' key Source filepats used to fill source value used later in Ftrack note or version in DB. --- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index b2be43c701..2fb6fcf3ce 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -45,6 +45,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): "files": filenames }) + instance.data["source"] = filepaths + self.log.debug("Created Simple Settings instance {}".format( instance.data )) From dbcf9097d38d8d7a4b094e161291859905ef3a3d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Jul 2022 15:43:22 +0200 Subject: [PATCH 290/785] OP-3481 - fix source, must be string --- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 2fb6fcf3ce..1f473ff71c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -45,7 +45,7 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): "files": filenames }) - instance.data["source"] = filepaths + instance.data["source"] = "\n".join(filepaths) self.log.debug("Created Simple Settings instance {}".format( instance.data From d294ad51748fa5ba02c6ca387f1c3af7ec1ba855 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 16:10:01 +0200 Subject: [PATCH 291/785] add also default values of missing attribute definitions --- openpype/pipeline/create/context.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index aecdb04635..a7a8eba383 100644 --- 
a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -29,6 +29,7 @@ UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) class ImmutableKeyError(TypeError): """Accessed key is immutable so does not allow changes or removements.""" + def __init__(self, key, msg=None): self.immutable_key = key if not msg: @@ -40,6 +41,7 @@ class ImmutableKeyError(TypeError): class HostMissRequiredMethod(Exception): """Host does not have implemented required functions for creation.""" + def __init__(self, host, missing_methods): self.missing_methods = missing_methods self.host = host @@ -66,6 +68,7 @@ class InstanceMember: TODO: Implement and use! """ + def __init__(self, instance, name): self.instance = instance @@ -94,6 +97,7 @@ class AttributeValues: values(dict): Values after possible conversion. origin_data(dict): Values loaded from host before conversion. """ + def __init__(self, attr_defs, values, origin_data=None): from openpype.lib.attribute_definitions import UnknownDef @@ -174,6 +178,10 @@ class AttributeValues: output = {} for key in self._data: output[key] = self[key] + + for key, attr_def in self._attr_defs_by_key.items(): + if key not in output: + output[key] = attr_def.default return output @staticmethod @@ -196,6 +204,7 @@ class CreatorAttributeValues(AttributeValues): Args: instance (CreatedInstance): Instance for which are values hold. """ + def __init__(self, instance, *args, **kwargs): self.instance = instance super(CreatorAttributeValues, self).__init__(*args, **kwargs) @@ -211,6 +220,7 @@ class PublishAttributeValues(AttributeValues): publish_attributes(PublishAttributes): Wrapper for multiple publish attributes is used as parent object. """ + def __init__(self, publish_attributes, *args, **kwargs): self.publish_attributes = publish_attributes super(PublishAttributeValues, self).__init__(*args, **kwargs) @@ -232,6 +242,7 @@ class PublishAttributes: attr_plugins(list): List of publish plugins that may have defined attribute definitions. """ + def __init__(self, parent, origin_data, attr_plugins=None): self.parent = parent self._origin_data = copy.deepcopy(origin_data) @@ -270,6 +281,7 @@ class PublishAttributes: key(str): Plugin name. default: Default value if plugin was not found. """ + if key not in self._data: return default @@ -287,11 +299,13 @@ class PublishAttributes: def plugin_names_order(self): """Plugin names order by their 'order' attribute.""" + for name in self._plugin_names_order: yield name def data_to_store(self): """Convert attribute values to "data to store".""" + output = {} for key, attr_value in self._data.items(): output[key] = attr_value.data_to_store() @@ -299,6 +313,7 @@ class PublishAttributes: def changes(self): """Return changes per each key.""" + changes = {} for key, attr_val in self._data.items(): attr_changes = attr_val.changes() @@ -314,6 +329,7 @@ class PublishAttributes: def set_publish_plugins(self, attr_plugins): """Set publish plugins attribute definitions.""" + self._plugin_names_order = [] self._missing_plugins = [] self.attr_plugins = attr_plugins or [] @@ -365,6 +381,7 @@ class CreatedInstance: `openpype.pipeline.registered_host`. new(bool): Is instance new. """ + # Keys that can't be changed or removed from data after loading using # creator. 
# - 'creator_attributes' and 'publish_attributes' can change values of @@ -566,6 +583,7 @@ class CreatedInstance: @property def id(self): """Instance identifier.""" + return self._data["instance_id"] @property @@ -574,10 +592,12 @@ class CreatedInstance: Access to data is needed to modify values. """ + return self def changes(self): """Calculate and return changes.""" + changes = {} new_keys = set() for key, new_value in self._data.items(): From ba6afb8be5107affa0179a1ac4f7c64241ba35a3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 16:10:22 +0200 Subject: [PATCH 292/785] added jpeg extension to default settings --- openpype/settings/defaults/project_settings/traypublisher.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 5afaaee78c..d3e8028cdb 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -116,6 +116,7 @@ ".png", ".dpx", ".jpg", + ".jpeg", ".tiff", ".tif", ".mov", @@ -158,6 +159,7 @@ "extensions": [ ".exr", ".jpg", + ".jpeg", ".dpx", ".bmp", ".tif", From ac118ddefd8d2d0d576de43115d914e2e403f925 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 16:10:42 +0200 Subject: [PATCH 293/785] fill "source" in simple instances --- .../plugins/publish/collect_simple_instances.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index b2be43c701..bbd0221c88 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -1,4 +1,6 @@ import os + +import clique import pyblish.api @@ -29,6 +31,14 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): for filename in filepath_item["filenames"] ] + cols, rems = clique.assemble(filepaths) + source = None + if cols: + source = cols[0].format("{head}{padding}{tail}") + elif rems: + source = rems[0] + + instance.data["source"] = source instance.data["sourceFilepaths"] = filepaths instance.data["stagingDir"] = filepath_item["directory"] From 5592b4fb83ad0f3f4b5e222ed0f8361d0ebee346 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 16:10:59 +0200 Subject: [PATCH 294/785] fill representation in instance data update --- openpype/plugins/publish/collect_from_create_context.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index f6ead98809..d2be633cbe 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -47,12 +47,11 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): "label": subset, "name": subset, "family": in_data["family"], - "families": instance_families + "families": instance_families, + "representations": [] }) for key, value in in_data.items(): if key not in instance.data: instance.data[key] = value self.log.info("collected instance: {}".format(instance.data)) self.log.info("parsing data: {}".format(in_data)) - - instance.data["representations"] = list() From 45473c5a832b4db881ca328ad89324ada93ae0e5 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 13 Jul 2022 16:24:08 
+0200 Subject: [PATCH 295/785] add host and families settings to integrators --- .../defaults/project_settings/global.json | 171 ++++++++++++++++++ .../schemas/schema_global_publish.json | 79 ++++++++ 2 files changed, 250 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 4e9b61100e..545c792d47 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -171,6 +171,177 @@ ] }, "IntegrateAssetNew": { + "hosts": [ + "aftereffects", + "blender", + "celaction", + "flame", + "fusion", + "harmony", + "hiero", + "houdini", + "nuke", + "photoshop", + "resolve", + "tvpaint", + "unreal", + "standalonepublisher", + "webpublisher" + ], + "families": [ + "workfile", + "pointcache", + "camera", + "animation", + "model", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "ass", + "vdbcache", + "scene", + "vrayproxy", + "vrayscene_layer", + "render", + "prerender", + "imagesequence", + "review", + "rendersetup", + "rig", + "plate", + "look", + "audio", + "yetiRig", + "yeticache", + "nukenodes", + "gizmo", + "source", + "matchmove", + "image", + "assembly", + "fbx", + "textures", + "action", + "harmony.template", + "harmony.palette", + "editorial", + "background", + "camerarig", + "redshiftproxy", + "effect", + "xgen", + "hda", + "usd", + "staticMesh", + "skeletalMesh", + "mvLook", + "mvUsd", + "mvUsdComposition", + "mvUsdOverride", + "simpleUnrealTexture" + ], + "template_name_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template_name": "publish" + }, + { + "families": [ + "review", + "render", + "prerender" + ], + "hosts": [], + "task_types": [], + "tasks": [], + "template_name": "render" + }, + { + "families": [ + "simpleUnrealTexture" + ], + "hosts": [ + "standalonepublisher" + ], + "task_types": [], + "tasks": [], + "template_name": "simpleUnrealTexture" + }, + { + "families": [ + "staticMesh", + "skeletalMesh" + ], + "hosts": [ + "maya" + ], + "task_types": [], + "tasks": [], + "template_name": "maya2unreal" + } + ] + }, + "IntegrateAsset": { + "hosts": [ + "maya" + ], + "families": [ + "workfile", + "pointcache", + "camera", + "animation", + "model", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "ass", + "vdbcache", + "scene", + "vrayproxy", + "vrayscene_layer", + "render", + "prerender", + "imagesequence", + "review", + "rendersetup", + "rig", + "plate", + "look", + "audio", + "yetiRig", + "yeticache", + "nukenodes", + "gizmo", + "source", + "matchmove", + "image", + "assembly", + "fbx", + "textures", + "action", + "harmony.template", + "harmony.palette", + "editorial", + "background", + "camerarig", + "redshiftproxy", + "effect", + "xgen", + "hda", + "usd", + "staticMesh", + "skeletalMesh", + "mvLook", + "mvUsd", + "mvUsdComposition", + "mvUsdOverride", + "simpleUnrealTexture" + ], "template_name_profiles": [ { "families": [], diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e368916cc9..71eed2e2de 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -587,6 +587,85 @@ "label": "IntegrateAssetNew", "is_group": true, "children": [ + { + "type": "list", + "key": "hosts", + "label": "Hosts", + 
"object_type": "text" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "list", + "key": "template_name_profiles", + "label": "Template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "IntegrateAsset", + "label": "IntegrateAsset", + "is_group": true, + "children": [ + { + "type": "list", + "key": "hosts", + "label": "Hosts", + "object_type": "text" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, { "type": "list", "key": "template_name_profiles", From be4ac5b56b9c815a268696ff23f072d14b029192 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 13 Jul 2022 17:25:51 +0200 Subject: [PATCH 296/785] add multiple items and review boolean to tray creator settings --- openpype/hosts/traypublisher/api/plugin.py | 7 +++++- .../project_settings/traypublisher.json | 24 ++++++++++++++++++- .../schema_project_traypublisher.json | 10 ++++++++ 3 files changed, 39 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 202664cfc6..a5d08c2967 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,3 +1,4 @@ +from openpype.lib.attribute_definitions import BoolDef from openpype.pipeline import ( Creator, CreatedInstance @@ -70,8 +71,10 @@ class SettingsCreator(TrayPublishCreator): folders=False, extensions=self.extensions, allow_sequences=self.allow_sequences, + single_item=not self.allow_multiple_items, label="Filepath", - ) + ), + BoolDef("allow_review", label="Reviewable", default=True) ] @classmethod @@ -92,6 +95,8 @@ class SettingsCreator(TrayPublishCreator): "detailed_description": item_data["detailed_description"], "extensions": item_data["extensions"], "allow_sequences": item_data["allow_sequences"], + "allow_multiple_items": item_data["allow_multiple_items"], + "allow_review": item_data["allow_review"], "default_variants": item_data["default_variants"] } ) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index d3e8028cdb..e59200a13b 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -10,7 +10,9 @@ ], "description": "Backup of a working scene", "detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. 
They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.", - "allow_sequences": true, + "allow_sequences": false, + "allow_multiple_items": false, + "allow_review": false, "extensions": [ ".ma", ".mb", @@ -44,6 +46,8 @@ "description": "Clean models", "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ", "allow_sequences": false, + "allow_multiple_items": true, + "allow_review": false, "extensions": [ ".ma", ".mb", @@ -68,6 +72,8 @@ "description": "Geometry Caches", "detailed_description": "Alembic or bgeo cache of animated data", "allow_sequences": true, + "allow_multiple_items": true, + "allow_review": false, "extensions": [ ".abc", ".bgeo", @@ -90,6 +96,8 @@ "description": "Footage Plates", "detailed_description": "Any type of image seqeuence coming from outside of the studio. Usually camera footage, but could also be animatics used for reference.", "allow_sequences": true, + "allow_multiple_items": true, + "allow_review": true, "extensions": [ ".exr", ".png", @@ -111,6 +119,8 @@ "description": "Rendered images or video", "detailed_description": "Sequence or single file renders", "allow_sequences": true, + "allow_multiple_items": true, + "allow_review": true, "extensions": [ ".exr", ".png", @@ -133,6 +143,8 @@ "description": "3d Camera", "detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.", "allow_sequences": false, + "allow_multiple_items": true, + "allow_review": false, "extensions": [ ".abc", ".ma", @@ -156,6 +168,8 @@ "description": "Single image", "detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. This is a fallback 2d family for everything that doesn't fit more specific family.", "allow_sequences": false, + "allow_multiple_items": true, + "allow_review": true, "extensions": [ ".exr", ".jpg", @@ -178,6 +192,8 @@ "description": "Sparse volumetric data", "detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids", "allow_sequences": true, + "allow_multiple_items": true, + "allow_review": false, "extensions": [ ".vdb" ] @@ -195,6 +211,8 @@ "description": "Matchmoving script", "detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data", "allow_sequences": false, + "allow_multiple_items": true, + "allow_review": false, "extensions": [] }, { @@ -206,6 +224,8 @@ "description": "CG rig file", "detailed_description": "CG rigged character or prop. 
Rig should be clean of any extra data and directly loadable into it's respective application\t", "allow_sequences": false, + "allow_multiple_items": false, + "allow_review": false, "extensions": [ ".ma", ".blend", @@ -224,6 +244,8 @@ "description": "Simple Unreal Engine texture", "detailed_description": "Texture files with Unreal Engine naming conventions", "allow_sequences": false, + "allow_multiple_items": true, + "allow_review": false, "extensions": [] } ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 55c1b7b7d7..08c95609c0 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -67,6 +67,16 @@ "label": "Allow sequences", "type": "boolean" }, + { + "key": "allow_multiple_items", + "label": "Allow multiple items", + "type": "boolean" + }, + { + "key": "allow_review", + "label": "Allow review", + "type": "boolean" + }, { "type": "list", "key": "extensions", From 654b0fb1eb8883815c8493ddf665d824d9830bdb Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 13 Jul 2022 17:26:07 +0200 Subject: [PATCH 297/785] delete obsolete create_review_family plugin --- .../plugins/publish/collect_review_family.py | 31 ------------------- 1 file changed, 31 deletions(-) delete mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_review_family.py diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py b/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py deleted file mode 100644 index 965e251527..0000000000 --- a/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py +++ /dev/null @@ -1,31 +0,0 @@ -import pyblish.api -from openpype.lib import BoolDef -from openpype.pipeline import OpenPypePyblishPluginMixin - - -class CollectReviewFamily( - pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin -): - """Add review family.""" - - label = "Collect Review Family" - order = pyblish.api.CollectorOrder - 0.49 - - hosts = ["traypublisher"] - families = [ - "image", - "render", - "plate", - "review" - ] - - def process(self, instance): - values = self.get_attr_values_from_data(instance.data) - if values.get("add_review_family"): - instance.data["families"].append("review") - - @classmethod - def get_attribute_defs(cls): - return [ - BoolDef("add_review_family", label="Review", default=True) - ] From 540e94e72680d9899689b18592deb09267e9ac3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 13 Jul 2022 17:44:29 +0200 Subject: [PATCH 298/785] :bug: fix git submodules weirdness --- .../UE_5.0/Content/Python/__init__.py => .gitmodules | 3 ++- openpype/hosts/unreal/integration/UE_5.0/Content/__init__.py | 0 2 files changed, 2 insertions(+), 1 deletion(-) rename openpype/hosts/unreal/integration/UE_5.0/Content/Python/__init__.py => .gitmodules (79%) delete mode 100644 openpype/hosts/unreal/integration/UE_5.0/Content/__init__.py diff --git a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/__init__.py b/.gitmodules similarity index 79% rename from openpype/hosts/unreal/integration/UE_5.0/Content/Python/__init__.py rename to .gitmodules index 1e3eb5e792..b515851c81 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/__init__.py +++ b/.gitmodules @@ -1,6 +1,7 @@ [submodule "vendor/powershell/BurntToast"] path = 
vendor/powershell/BurntToast url = https://github.com/Windos/BurntToast.git + [submodule "vendor/powershell/PSWriteColor"] path = vendor/powershell/PSWriteColor - url = https://github.com/EvotecIT/PSWriteColor.git \ No newline at end of file + url = https://github.com/EvotecIT/PSWriteColor.git diff --git a/openpype/hosts/unreal/integration/UE_5.0/Content/__init__.py b/openpype/hosts/unreal/integration/UE_5.0/Content/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 From 2d2bf24c454d828a732f12a1dc628e1bfb375595 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 13 Jul 2022 18:06:03 +0200 Subject: [PATCH 299/785] :bug: init submodules first --- tools/create_env.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index b1337b5635..387bdf919c 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -28,9 +28,11 @@ $current_dir = Get-Location $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName +& git submodule update --init --recursive # Install PSWriteColor to support colorized output to terminal $env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" + function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId From a427648abd8cb24a6ba906d5eb1ef7eb8212f96b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 19:05:23 +0200 Subject: [PATCH 300/785] implemented internal drag and drop and disabled sorting --- .../widgets/attribute_defs/files_widget.py | 62 ++++++++++++++++++- 1 file changed, 60 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 698a91a1a5..4652e12ab1 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -1,6 +1,7 @@ import os import collections import uuid +import json from Qt import QtWidgets, QtCore, QtGui @@ -245,6 +246,62 @@ class FilesModel(QtGui.QStandardItemModel): return item_id, item + def mimeData(self, indexes): + item_ids = [ + index.data(ITEM_ID_ROLE) + for index in indexes + ] + encoded_data = QtCore.QByteArray() + stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.WriteOnly) + stream.writeQString(json.dumps(item_ids)) + mime_data = super(FilesModel, self).mimeData(indexes) + mime_data.setData("files_widget/internal_move", encoded_data) + return mime_data + + def dropMimeData(self, mime_data, action, row, col, index): + internal_move_data = mime_data.data("files_widget/internal_move") + if isinstance(internal_move_data, QtCore.QByteArray): + # Raw data are already QByteArrat and we don't have to load them + encoded_data = internal_move_data + else: + encoded_data = QtCore.QByteArray.fromRawData(internal_move_data) + stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly) + text = stream.readQString() + try: + item_ids = json.loads(text) + except Exception: + return False + + # Find matching item after which will be items moved + # - store item before moved items are removed + root = self.invisibleRootItem() + if row >= 0: + src_item = self.item(row) + else: + src_item_id = index.data(ITEM_ID_ROLE) + src_item = self._items_by_id.get(src_item_id) + + # Take out items that should be 
moved + items = [] + for item_id in item_ids: + item = self._items_by_id.get(item_id) + if item: + self.takeRow(item.row()) + items.append(item) + + # Skip if there are not items that can be moved + if not items: + return False + + # Calculate row where items should be inserted + if src_item: + src_row = src_item.row() + else: + src_row = root.rowCount() + + root.insertRow(src_row, items) + return True + class FilesProxyModel(QtCore.QSortFilterProxyModel): def __init__(self, *args, **kwargs): @@ -428,6 +485,9 @@ class FilesView(QtWidgets.QListView): QtWidgets.QAbstractItemView.ExtendedSelection ) self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + self.setAcceptDrops(True) + self.setDragEnabled(True) + self.setDragDropMode(self.InternalMove) remove_btn = InViewButton(self) pix_enabled = paint_image_with_color( @@ -637,8 +697,6 @@ class FilesWidget(QtWidgets.QFrame): ) self._widgets_by_id[item_id] = widget - self._files_proxy_model.sort(0) - if not self._in_set_value: self.value_changed.emit() From 2fbc1aa00676d542b7765773f376f3eee677c348 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 19:06:03 +0200 Subject: [PATCH 301/785] make drop available --- openpype/widgets/attribute_defs/files_widget.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 4652e12ab1..eeb8f2d6dc 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -805,8 +805,12 @@ class FilesWidget(QtWidgets.QFrame): event.accept() def dropEvent(self, event): + if self._multivalue: + return + mime_data = event.mimeData() - if not self._multivalue and mime_data.hasUrls(): + if mime_data.hasUrls(): + event.accept() filepaths = [] for url in mime_data.urls(): filepath = url.toLocalFile() @@ -817,7 +821,8 @@ class FilesWidget(QtWidgets.QFrame): filepaths = self._files_proxy_model.filter_valid_files(filepaths) if filepaths: self._add_filepaths(filepaths) - event.accept() + + super(FilesWidget, self).dropEvent(event) def _add_filepaths(self, filepaths): self._files_model.add_filepaths(filepaths) From 08a9613c660f06d1158b2c8d9a2ffc5303a92e23 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Jul 2022 19:32:44 +0200 Subject: [PATCH 302/785] fix label height in files widget --- .../widgets/attribute_defs/files_widget.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index eeb8f2d6dc..0b3a81e903 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -62,6 +62,14 @@ class DropEmpty(QtWidgets.QWidget): widget.setAlignment(QtCore.Qt.AlignCenter) widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) + update_size_timer = QtCore.QTimer() + update_size_timer.setInterval(10) + update_size_timer.setSingleShot(True) + + update_size_timer.timeout.connect(self._on_update_size_timer) + + self._update_size_timer = update_size_timer + self._single_item = single_item self._allow_sequences = allow_sequences self._allowed_extensions = set() @@ -130,7 +138,28 @@ class DropEmpty(QtWidgets.QWidget): ", ".join(sorted(self._allowed_extensions)) ) + if self._items_label_widget.text() == items_label: + return + self._items_label_widget.setText(items_label) + self._update_size_timer.start() + + def resizeEvent(self, event): + super(DropEmpty, self).resizeEvent(event) 
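# A minimal sketch of the mime-data round trip that the internal-move support
# above relies on: in mimeData() the dragged item ids are serialized into a
# QByteArray through QDataStream under a custom mime type, and dropMimeData()
# decodes them again before the rows are taken out and re-inserted.  The
# helper names below are illustrative only and are not part of this patch.
import json
from Qt import QtCore

def encode_item_ids(item_ids):
    # Serialize ids as a single JSON string inside a QByteArray.
    encoded = QtCore.QByteArray()
    stream = QtCore.QDataStream(encoded, QtCore.QIODevice.WriteOnly)
    stream.writeQString(json.dumps(item_ids))
    return encoded

def decode_item_ids(encoded):
    # Reverse of 'encode_item_ids', used on the drop side.
    stream = QtCore.QDataStream(encoded, QtCore.QIODevice.ReadOnly)
    return json.loads(stream.readQString())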
+ self._update_size_timer.start() + + def _on_update_size_timer(self): + """Recalculate height of label with extensions. + + Dynamic QLabel with word wrap does not handle properly it's sizeHint + calculations on show. This way it is recalculated. It is good practice + to trigger this method with small offset using '_update_size_timer'. + """ + + width = self._items_label_widget.width() + height = self._items_label_widget.heightForWidth(width) + self._items_label_widget.setMinimumHeight(height) + self._items_label_widget.updateGeometry() def paintEvent(self, event): super(DropEmpty, self).paintEvent(event) @@ -613,6 +642,7 @@ class FilesWidget(QtWidgets.QFrame): files_view.context_menu_requested.connect( self._on_context_menu_requested ) + self._in_set_value = False self._single_item = single_item self._multivalue = False From 731aecd71c58153dc45586ff67bc362d37e9f281 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 13 Jul 2022 21:48:50 +0200 Subject: [PATCH 303/785] sort plugins separatelly and don't count on order from report --- .../publisher/publish_report_viewer/report_items.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/report_items.py b/openpype/tools/publisher/publish_report_viewer/report_items.py index b47d14da25..8a01569723 100644 --- a/openpype/tools/publisher/publish_report_viewer/report_items.py +++ b/openpype/tools/publisher/publish_report_viewer/report_items.py @@ -83,10 +83,8 @@ class PublishReport: logs = [] plugins_items_by_id = {} - plugins_id_order = [] for plugin_data in data["plugins_data"]: item = PluginItem(plugin_data) - plugins_id_order.append(item.id) plugins_items_by_id[item.id] = item for instance_data_item in plugin_data["instances_data"]: instance_id = instance_data_item["id"] @@ -95,6 +93,14 @@ class PublishReport: copy.deepcopy(log_item_data), item.id, instance_id ) logs.append(log_item) + sorted_plugins = sorted( + plugins_items_by_id.values(), + key=lambda item: item.order + ) + plugins_id_order = [ + plugin_item.id + for plugin_item in sorted_plugins + ] logs_by_instance_id = collections.defaultdict(list) for log_item in logs: From 84068bdd50a2a5bd5547d969d5f2fb74068067a5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 11:32:29 +0200 Subject: [PATCH 304/785] added ability to zoom text in report --- .../publish_report_viewer/widgets.py | 67 ++++++++++++++++++- 1 file changed, 65 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index fd226ea0e4..61eb814a56 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -1,3 +1,4 @@ +from math import ceil from Qt import QtWidgets, QtCore, QtGui from openpype.widgets.nice_checkbox import NiceCheckbox @@ -137,13 +138,75 @@ class PluginLoadReportWidget(QtWidgets.QWidget): self._model.set_report(report) +class ZoomPlainText(QtWidgets.QPlainTextEdit): + def __init__(self, *args, **kwargs): + super(ZoomPlainText, self).__init__(*args, **kwargs) + + anim_timer = QtCore.QTimer() + anim_timer.setInterval(20) + + anim_timer.timeout.connect(self._scaling_callback) + + self._anim_timer = anim_timer + self._zoom_enabled = False + self._scheduled_scalings = 0 + self._point_size = None + + def wheelEvent(self, event): + if not self._zoom_enabled: + super(ZoomPlainText, self).wheelEvent(event) + return + + degrees = float(event.delta()) / 8 + 
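# Note on the arithmetic around this spot: QWheelEvent.delta() reports wheel
# rotation in eighths of a degree, so one notch of a typical wheel is
# delta == 120, i.e. 120 / 8 == 15 degrees.  Dividing by 5 on the next line
# schedules ceil(15 / 5) == 3 scaling steps per notch (scrolling the other
# way schedules negative steps), and every tick of '_anim_timer' then applies
# factor = 1.0 + remaining_steps / 300, roughly one percent at first, while
# the queue of scheduled scalings drains back to zero.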
steps = int(ceil(degrees / 5)) + self._scheduled_scalings += steps + if (self._scheduled_scalings * steps < 0): + self._scheduled_scalings = steps + + self._anim_timer.start() + + def _scaling_callback(self): + if self._scheduled_scalings == 0: + self._anim_timer.stop() + return + + factor = 1.0 + (self._scheduled_scalings / 300) + font = self.font() + if self._point_size is None: + self._point_size = font.pointSizeF() + + self._point_size *= factor + if self._point_size < 1: + self._point_size = 1.0 + + font.setPointSizeF(self._point_size) + # Using 'self.setFont(font)' would not be propagated when stylesheets + # are applied on this widget + self.setStyleSheet("font-size: {}pt".format(font.pointSize())) + + if self._scheduled_scalings > 0: + self._scheduled_scalings -= 1 + else: + self._scheduled_scalings += 1 + + def keyPressEvent(self, event): + if event.key() == QtCore.Qt.Key_Control: + self._zoom_enabled = True + super(ZoomPlainText, self).keyPressEvent(event) + + def keyReleaseEvent(self, event): + if event.key() == QtCore.Qt.Key_Control: + self._zoom_enabled = False + super(ZoomPlainText, self).keyReleaseEvent(event) + + class DetailsWidget(QtWidgets.QWidget): def __init__(self, parent): super(DetailsWidget, self).__init__(parent) - output_widget = QtWidgets.QPlainTextEdit(self) - output_widget.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction) + output_widget = ZoomPlainText(self) output_widget.setObjectName("PublishLogConsole") + output_widget.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) From c16a2d6ed53caa68d44b986c1aec375fda8e34f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:17:19 +0200 Subject: [PATCH 305/785] moved collect cleanup keys earlier --- openpype/plugins/publish/collect_cleanup_keys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_cleanup_keys.py b/openpype/plugins/publish/collect_cleanup_keys.py index 635b038387..b9cd1a9fc9 100644 --- a/openpype/plugins/publish/collect_cleanup_keys.py +++ b/openpype/plugins/publish/collect_cleanup_keys.py @@ -14,7 +14,7 @@ class CollectCleanupKeys(pyblish.api.ContextPlugin): """Prepare keys for 'ExplicitCleanUp' plugin.""" label = "Collect Cleanup Keys" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 def process(self, context): context.data["cleanupFullPaths"] = [] From 43d744b24b54f21ca2f7e622ca491af91833e689 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:17:43 +0200 Subject: [PATCH 306/785] instance data is filled with instance asset specific values if are not already available on instance --- .../publish/collect_anatomy_instance_data.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index c75534cf83..f67d3373d9 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -51,6 +51,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): project_name = legacy_io.active_project() self.fill_missing_asset_docs(context, project_name) + self.fill_instance_data_from_asset(context) self.fill_latest_versions(context, project_name) self.fill_anatomy_data(context) @@ -115,6 +116,23 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): "Not found asset documents with names \"{}\"." 
).format(joined_asset_names)) + def fill_instance_data_from_asset(self, context): + for instance in context: + asset_doc = instance.data.get("assetEntity") + if not asset_doc: + continue + + asset_data = asset_doc["data"] + for key in ( + "fps", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + ): + if key not in instance.data and key in asset_data: + instance.data[key] = asset_data[key] + def fill_latest_versions(self, context, project_name): """Try to find latest version for each instance's subset. From 4f133d26b4312bcba7b3b2ddbcac19131667d590 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:29:13 +0200 Subject: [PATCH 307/785] changed allow_review to reviewable which affect default value of reviewable on instances --- openpype/hosts/traypublisher/api/plugin.py | 8 +++++-- .../project_settings/traypublisher.json | 22 +++++++++---------- .../schema_project_traypublisher.json | 4 ++-- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 3877d33055..6966935091 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -87,7 +87,11 @@ class SettingsCreator(TrayPublishCreator): single_item=not self.allow_multiple_items, label="Filepath", ), - BoolDef("allow_review", label="Reviewable", default=True) + BoolDef( + "reviewable", + label="Reviewable", + default=self.reviewable + ) ] @classmethod @@ -109,7 +113,7 @@ class SettingsCreator(TrayPublishCreator): "extensions": item_data["extensions"], "allow_sequences": item_data["allow_sequences"], "allow_multiple_items": item_data["allow_multiple_items"], - "allow_review": item_data["allow_review"], + "reviewable": item_data["reviewable"], "default_variants": item_data["default_variants"] } ) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index e59200a13b..619d54dbaf 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -12,7 +12,7 @@ "detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.", "allow_sequences": false, "allow_multiple_items": false, - "allow_review": false, + "reviewable": false, "extensions": [ ".ma", ".mb", @@ -47,7 +47,7 @@ "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ", "allow_sequences": false, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [ ".ma", ".mb", @@ -73,7 +73,7 @@ "detailed_description": "Alembic or bgeo cache of animated data", "allow_sequences": true, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [ ".abc", ".bgeo", @@ -97,7 +97,7 @@ "detailed_description": "Any type of image seqeuence coming from outside of the studio. 
Usually camera footage, but could also be animatics used for reference.", "allow_sequences": true, "allow_multiple_items": true, - "allow_review": true, + "reviewable": true, "extensions": [ ".exr", ".png", @@ -120,7 +120,7 @@ "detailed_description": "Sequence or single file renders", "allow_sequences": true, "allow_multiple_items": true, - "allow_review": true, + "reviewable": true, "extensions": [ ".exr", ".png", @@ -144,7 +144,7 @@ "detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.", "allow_sequences": false, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [ ".abc", ".ma", @@ -169,7 +169,7 @@ "detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. This is a fallback 2d family for everything that doesn't fit more specific family.", "allow_sequences": false, "allow_multiple_items": true, - "allow_review": true, + "reviewable": true, "extensions": [ ".exr", ".jpg", @@ -193,7 +193,7 @@ "detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids", "allow_sequences": true, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [ ".vdb" ] @@ -212,7 +212,7 @@ "detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data", "allow_sequences": false, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [] }, { @@ -225,7 +225,7 @@ "detailed_description": "CG rigged character or prop. Rig should be clean of any extra data and directly loadable into it's respective application\t", "allow_sequences": false, "allow_multiple_items": false, - "allow_review": false, + "reviewable": false, "extensions": [ ".ma", ".blend", @@ -245,7 +245,7 @@ "detailed_description": "Texture files with Unreal Engine naming conventions", "allow_sequences": false, "allow_multiple_items": true, - "allow_review": false, + "reviewable": false, "extensions": [] } ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 08c95609c0..269b47459c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -73,8 +73,8 @@ "type": "boolean" }, { - "key": "allow_review", - "label": "Allow review", + "key": "reviewable", + "label": "Reviewable", "type": "boolean" }, { From 1eceb7296df14e914a3c1de2c76dd1dcb84ab6ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:29:43 +0200 Subject: [PATCH 308/785] define some extensions for which reviewable could work --- .../publish/collect_simple_instances.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index f76306cf05..4992d0a8be 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -12,6 +12,28 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): hosts = ["traypublisher"] 
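# The extension lists added just below are what a representation's file
# extension is later matched against when deciding whether it can back a
# review.  A minimal sketch of that membership test; the helper name is
# illustrative and not part of the plugin:
import os

def can_be_reviewed(filename, review_extensions):
    # Compare the file suffix against the allowed review extensions.
    _, ext = os.path.splitext(filename)
    return ext in review_extensions

# can_be_reviewed("plate.1001.exr", {".exr", ".mov"}) -> True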
+ _image_extensions = [ + ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", + ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", + ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", + ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", + ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", + ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", + ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", + ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", + ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", + ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", + ".xpm", ".xwd" + ] + _video_extensions = [ + ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", + ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", + ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", + ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", + ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" + ] + _review_extensions = _image_extensions + _video_extensions + def process(self, instance): if not instance.data.get("settings_creator"): return From a2a83623ff8f3f9ddece29227b60428721e5b56c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:30:40 +0200 Subject: [PATCH 309/785] instance staging dir lead to temp --- .../plugins/publish/collect_simple_instances.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 4992d0a8be..6a583b2e50 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -38,11 +38,15 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if not instance.data.get("settings_creator"): return - if "families" not in instance.data: - instance.data["families"] = [] + # Create instance's staging dir in temp + tmp_folder = tempfile.mkdtemp(prefix="traypublisher_") + instance.data["stagingDir"] = tmp_folder + instance.context.data["cleanupFullPaths"].append(tmp_folder) + + self.log.debug( + "Created temp staging directory for instance {}".format(tmp_folder) + ) - if "representations" not in instance.data: - instance.data["representations"] = [] repres = instance.data["representations"] creator_attributes = instance.data["creator_attributes"] @@ -62,7 +66,6 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): instance.data["source"] = source instance.data["sourceFilepaths"] = filepaths - instance.data["stagingDir"] = filepath_item["directory"] filenames = filepath_item["filenames"] _, ext = os.path.splitext(filenames[0]) From a088db2db9db034c752d7dd299614c3a8e739357 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:46:57 +0200 Subject: [PATCH 310/785] modified validator if not existing paths as there is a chance that filepaths are not filled at all --- .../plugins/publish/validate_filepaths.py | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py index c7302b1005..e02116e10b 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py @@ -3,8 +3,17 @@ 
import pyblish.api from openpype.pipeline import PublishValidationError -class ValidateWorkfilePath(pyblish.api.InstancePlugin): - """Validate existence of workfile instance existence.""" +class ValidateFilePath(pyblish.api.InstancePlugin): + """Validate existence of source filepaths on instance. + + Plugins looks into key 'sourceFilepaths' and validate if paths there + actually exist on disk. + + Also validate if the key is filled but is empty. In that case also + crashes so do not fill the key if unfilled value should not cause error. + + This is primarily created for Simple Creator instances. + """ label = "Validate Workfile" order = pyblish.api.ValidatorOrder - 0.49 @@ -14,12 +23,24 @@ class ValidateWorkfilePath(pyblish.api.InstancePlugin): def process(self, instance): if "sourceFilepaths" not in instance.data: self.log.info(( - "Can't validate source filepaths existence." + "Skipped validation of source filepaths existence." " Instance does not have collected 'sourceFilepaths'" )) return - filepaths = instance.data.get("sourceFilepaths") + filepaths = instance.data["sourceFilepaths"] + if not filepaths: + raise PublishValidationError( + ( + "Source filepaths of '{}' instance \"{}\" are not filled" + ).format(instance.data["family"], instance.data["name"]), + "File not filled", + ( + "## Files were not filled" + "\nThis could mean that you didn't enter files into file" + "input." + ) + ) not_found_files = [ filepath From bc09a92d52be3782015337c436f89f9ef832a3cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:47:22 +0200 Subject: [PATCH 311/785] modified simple instance collector to be able handle multivalue of fileitems --- .../publish/collect_simple_instances.py | 121 ++++++++++++++---- 1 file changed, 93 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 6a583b2e50..1b2129f48e 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -1,4 +1,6 @@ import os +import json +import tempfile import clique import pyblish.api @@ -38,50 +40,113 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if not instance.data.get("settings_creator"): return + instance_label = instance.data["name"] # Create instance's staging dir in temp tmp_folder = tempfile.mkdtemp(prefix="traypublisher_") instance.data["stagingDir"] = tmp_folder instance.context.data["cleanupFullPaths"].append(tmp_folder) - self.log.debug( - "Created temp staging directory for instance {}".format(tmp_folder) - ) + self.log.debug(( + "Created temp staging directory for instance {}. 
{}" + ).format(instance_label, tmp_folder)) repres = instance.data["representations"] creator_attributes = instance.data["creator_attributes"] - filepath_item = creator_attributes["filepath"] - self.log.info(filepath_item) - filepaths = [ - os.path.join(filepath_item["directory"], filename) - for filename in filepath_item["filenames"] - ] + self.log.info(json.dumps(creator_attributes)) + filepath_items = creator_attributes["filepath"] + if not isinstance(filepath_items, list): + filepath_items = [filepath_items] - cols, rems = clique.assemble(filepaths) + # Last found representation is used as source for instance source = None + # Check if review is enabled and should be created + reviewable = creator_attributes.get("reviewable") + # Store review representation - first found that can be used for + # review is stored + review_representation = None + review_path = None + + # Make sure there are no representations with same name + repre_names_counter = {} + # Store created names for logging + _repre_names = [] + # Store filepaths for validation of their existence + source_filepaths = [] + + # Create representations + for filepath_item in filepath_items: + filepaths = [ + os.path.join(filepath_item["directory"], filename) + for filename in filepath_item["filenames"] + ] + source_filepaths.extend(filepaths) + + source = self._calculate_source(filepaths) + filenames = filepath_item["filenames"] + _, ext = os.path.splitext(filenames[0]) + if len(filenames) == 1: + filenames = filenames[0] + + repre_name = repre_ext = ext[1:] + if repre_name not in repre_names_counter: + repre_names_counter[repre_name] = 2 + else: + counter = repre_names_counter[repre_name] + repre_names_counter[repre_name] += 1 + repre_name = "{}_{}".format(repre_name, counter) + + _repre_names.append('"{}"'.format(repre_name)) + representation = { + "ext": repre_ext, + "name": repre_name, + "stagingDir": filepath_item["directory"], + "files": filenames, + "tags": [] + } + repres.append(representation) + + if ( + reviewable + and review_representation is None + and ext in self._review_extensions + ): + review_representation = representation + review_path = source + + instance.data["source"] = source + instance.data["sourceFilepaths"] = source_filepaths + + if reviewable: + self._prepare_review(instance, review_representation, review_path) + + self.log.debug(( + "Created Simple Settings instance \"{}\"" + " with {} representations: {}" + ).format(instance_label, len(repres), ", ".join(_repre_names))) + + def _calculate_source(self, filepaths): + if not filepaths: + return None + cols, rems = clique.assemble(filepaths) if cols: source = cols[0].format("{head}{padding}{tail}") elif rems: source = rems[0] + return source - instance.data["source"] = source - instance.data["sourceFilepaths"] = filepaths + def _prepare_review(self, instance, review_representation, review_path): + if not review_representation: + self.log.waring(( + "Didn't find any representation" + " that could be used as source for review" + )) + return - filenames = filepath_item["filenames"] - _, ext = os.path.splitext(filenames[0]) - ext = ext[1:] - if len(filenames) == 1: - filenames = filenames[0] + if "review" not in instance.data["families"]: + instance.data["families"].append("review") - repres.append({ - "ext": ext, - "name": ext, - "stagingDir": filepath_item["directory"], - "files": filenames - }) - - instance.data["source"] = "\n".join(filepaths) - - self.log.debug("Created Simple Settings instance {}".format( - instance.data + 
review_representation["tags"].append("review") + self.log.debug("Representation {} was marked for review. {}".format( + review_representation["name"], review_path )) From b51c44d6f203a9cf0958bbc83504e4202ec1b98f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:52:35 +0200 Subject: [PATCH 312/785] modified error message --- .../plugins/publish/validate_filepaths.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py index e02116e10b..749199fbd3 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py @@ -28,18 +28,22 @@ class ValidateFilePath(pyblish.api.InstancePlugin): )) return + family = instance.data["family"] + label = instance.data["name"] filepaths = instance.data["sourceFilepaths"] if not filepaths: raise PublishValidationError( ( "Source filepaths of '{}' instance \"{}\" are not filled" - ).format(instance.data["family"], instance.data["name"]), + ).format(family, label), "File not filled", ( "## Files were not filled" - "\nThis could mean that you didn't enter files into file" - "input." - ) + "\nThis mean that you didn't enter any files into required" + " file input." + "\n- Please refresh publishing and check instance" + " {}" + ).format(label) ) not_found_files = [ @@ -55,11 +59,7 @@ class ValidateFilePath(pyblish.api.InstancePlugin): raise PublishValidationError( ( "Filepath of '{}' instance \"{}\" does not exist:\n{}" - ).format( - instance.data["family"], - instance.data["name"], - joined_paths - ), + ).format(family, label, joined_paths), "File not found", ( "## Files were not found\nFiles\n{}" From 465c506162cba6b55981f56fba382b38c79ffc11 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:52:41 +0200 Subject: [PATCH 313/785] fix typo --- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 1b2129f48e..8dd2964252 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -137,7 +137,7 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): def _prepare_review(self, instance, review_representation, review_path): if not review_representation: - self.log.waring(( + self.log.warning(( "Didn't find any representation" " that could be used as source for review" )) From 9c1dd8b5ca4df655bf533ac23773926caee1afd6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 12:59:45 +0200 Subject: [PATCH 314/785] save changes before reset --- openpype/tools/publisher/control.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index f692bb4000..b48bb61386 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -575,6 +575,8 @@ class PublisherController: # Stop publishing self.stop_publish() + self.save_changes() + # Reset avalon context self.create_context.reset_avalon_context() From 3cb9748613ef8cf8fc9a563e8df93c11b275a7d6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Jul 2022 13:23:40 +0200 Subject: [PATCH 
315/785] trayp: editorial settings for shot metadata --- .../project_settings/traypublisher.json | 30 +++ .../schema_project_traypublisher.json | 185 ++++++++++++++---- 2 files changed, 180 insertions(+), 35 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index ee8f90df7f..93f6420c21 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -232,6 +232,36 @@ "default_variants": [ "Main" ], + "clip_name_tokenizer": { + "_sequence_": "(sc\\d{3})", + "_shot_": "(sh\\d{3})" + }, + "shot_rename": { + "enabled": true, + "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}" + }, + "shot_hierarchy": { + "enabled": true, + "parents_path": "{project}/{folder}/{sequence}", + "parents": [ + { + "type": "project", + "name": "projekt", + "value": "{projekt[name]}" + }, + { + "type": "folder", + "name": "folder", + "value": "shots" + }, + { + "type": "sequence", + "name": "sequence", + "value": "{_sequence_}" + } + ] + }, + "shot_add_tasks": {}, "family_presets": [ { "family": "review", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 8f1caceb49..8d95cb19a9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -108,42 +108,157 @@ "type": "splitter" }, { - "type": "list", - "key": "family_presets", - "label": "Family presets", - "object_type": { - "type": "dict", - "children": [ - { - "type": "enum", - "key": "family", - "label": "Family", - "enum_items": [ - {"review": "review"}, - {"plate": "plate"}, - {"audio": "audio"} - ] - }, - { - "type": "text", - "key": "variant", - "label": "Variant", - "placeholder": "< Inherited >" - }, - { - "type": "boolean", - "key": "review", - "label": "Review", - "default": true - }, - { - "type": "list", - "key": "filter_ext", - "label": "Allowed input file types", - "object_type": "text" + "type": "collapsible-wrap", + "label": "Shot metadata creator", + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "clip_name_tokenizer", + "label": "Clip name tokenizer", + "type": "dict-modifiable", + "highlight_content": true, + "tooltip": "Using Regex expression to create tokens. \nThose can be used later in \"Shot rename\" creator \nor \"Shot hierarchy\". 
\n\nTokens should be decorated with \"_\" on each side", + "object_type": { + "type": "text" } - ] - } + }, + { + "type": "dict", + "key": "shot_rename", + "label": "Shot rename", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "shot_rename_template", + "label": "Shot rename template", + "tooltip":"Template only supports Anatomy keys and Tokens \nfrom \"Clip name tokenizer\"" + } + ] + }, + { + "type": "dict", + "key": "shot_hierarchy", + "label": "Shot hierarchy", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "parents_path", + "label": "Parents path template", + "tooltip": "Using keys from \"Token to parent convertor\" or tokens directly" + }, + { + "key": "parents", + "label": "Token to parent convertor", + "type": "list", + "highlight_content": true, + "tooltip": "The left side is key to be used in template. \nThe right is value build from Tokens comming from \n\"Clip name tokenizer\"", + "object_type": { + "type": "dict", + "children": [ + { + "type": "enum", + "key": "type", + "label": "Parent type", + "enum_items": [ + {"project": "Project"}, + {"folder": "Folder"}, + {"episode": "Episode"}, + {"sequence": "Sequence"} + ] + }, + { + "type": "text", + "key": "name", + "label": "Parent token name", + "tooltip": "Unique name used in \"Parent path template\"" + }, + { + "type": "text", + "key": "value", + "label": "Parent name value", + "tooltip": "Template where any text, Anatomy keys and Tokens could be used" + } + ] + } + } + ] + }, + { + "key": "shot_add_tasks", + "label": "Add tasks to shot", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "task-types-enum", + "key": "type", + "label": "Task type" + } + ] + } + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Shot's subset creator", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "list", + "key": "family_presets", + "label": "Family presets", + "object_type": { + "type": "dict", + "children": [ + { + "type": "enum", + "key": "family", + "label": "Family", + "enum_items": [ + {"review": "review"}, + {"plate": "plate"}, + {"audio": "audio"} + ] + }, + { + "type": "text", + "key": "variant", + "label": "Variant", + "placeholder": "< Inherited >" + }, + { + "type": "boolean", + "key": "review", + "label": "Review", + "default": true + }, + { + "type": "list", + "key": "filter_ext", + "label": "Allowed input file types", + "object_type": "text" + } + ] + } + } + ] } ] } From 36ae9ff49cff8a460bcd46ef49e7c43c440874a7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 13:38:40 +0200 Subject: [PATCH 316/785] added some docstring to plugin --- .../plugins/publish/collect_simple_instances.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 8dd2964252..424cf7d88d 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -7,7 +7,21 @@ import pyblish.api class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): - """Collect data for instances created by settings creators.""" + """Collect data for instances created by settings 
creators. + + Plugin create representations based on 'filepath' attribute stored + on instance. + + Representations can be marked for review and in that case is also added + 'review' family to instance families. For review can be marked only one + representation so **first** representation that has extension available + in '_review_extensions' is used for review. + + For 'source' on instance is used path from last created representation. + + Set staging directory on instance. That is probably never used because + each created representation has it's own staging dir. + """ label = "Collect Settings Simple Instances" order = pyblish.api.CollectorOrder - 0.49 From 8083c1ed0aea6b0d9185d86c18d4b2e3c552ea50 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 13:42:30 +0200 Subject: [PATCH 317/785] remove not relevant lines --- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 424cf7d88d..e8e1c1013c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -140,8 +140,6 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): ).format(instance_label, len(repres), ", ".join(_repre_names))) def _calculate_source(self, filepaths): - if not filepaths: - return None cols, rems = clique.assemble(filepaths) if cols: source = cols[0].format("{head}{padding}{tail}") From b5fa8b524e6fa13669e8f755c97d75f3ea217158 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 13:51:20 +0200 Subject: [PATCH 318/785] unify imports --- openpype/hosts/traypublisher/api/plugin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 6966935091..46fb4fdb51 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,9 +1,8 @@ -from openpype.lib.attribute_definitions import BoolDef +from openpype.lib.attribute_definitions import BoolDef, FileDef from openpype.pipeline import ( Creator, CreatedInstance ) -from openpype.lib import FileDef from .pipeline import ( list_instances, From 7729d53921659abe7751cd007cda78156681afc1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 14:34:50 +0200 Subject: [PATCH 319/785] files item can have custom extensions label --- openpype/lib/attribute_definitions.py | 13 +++++++--- .../widgets/attribute_defs/files_widget.py | 26 ++++++++++++++----- openpype/widgets/attribute_defs/widgets.py | 5 +++- 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index a1f7c1e0f4..17658eef93 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -14,6 +14,7 @@ class AbstractAttrDefMeta(ABCMeta): Each object of `AbtractAttrDef` mus have defined 'key' attribute. """ + def __call__(self, *args, **kwargs): obj = super(AbstractAttrDefMeta, self).__call__(*args, **kwargs) init_class = getattr(obj, "__init__class__", None) @@ -45,6 +46,7 @@ class AbtractAttrDef: is_label_horizontal(bool): UI specific argument. Specify if label is next to value input or ahead. 
""" + is_value_def = True def __init__( @@ -77,6 +79,7 @@ class AbtractAttrDef: Convert passed value to a valid type. Use default if value can't be converted. """ + pass @@ -113,6 +116,7 @@ class UnknownDef(AbtractAttrDef): This attribute can be used to keep existing data unchanged but does not have known definition of type. """ + def __init__(self, key, default=None, **kwargs): kwargs["default"] = default super(UnknownDef, self).__init__(key, **kwargs) @@ -204,6 +208,7 @@ class TextDef(AbtractAttrDef): placeholder(str): UI placeholder for attribute. default(str, None): Default value. Empty string used when not defined. """ + def __init__( self, key, multiline=None, regex=None, placeholder=None, default=None, **kwargs @@ -531,14 +536,15 @@ class FileDef(AbtractAttrDef): Args: single_item(bool): Allow only single path item. folders(bool): Allow folder paths. - extensions(list): Allow files with extensions. Empty list will + extensions(List[str]): Allow files with extensions. Empty list will allow all extensions and None will disable files completely. - default(str, list): Defautl value. + extensions_label(str): Custom label shown instead of extensions in UI. + default(str, List[str]): Default value. """ def __init__( self, key, single_item=True, folders=None, extensions=None, - allow_sequences=True, default=None, **kwargs + allow_sequences=True, extensions_label=None, default=None, **kwargs ): if folders is None and extensions is None: folders = True @@ -578,6 +584,7 @@ class FileDef(AbtractAttrDef): self.folders = folders self.extensions = set(extensions) self.allow_sequences = allow_sequences + self.extensions_label = extensions_label super(FileDef, self).__init__(key, default=default, **kwargs) def __eq__(self, other): diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 0b3a81e903..508da4893b 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -34,7 +34,7 @@ class SupportLabel(QtWidgets.QLabel): class DropEmpty(QtWidgets.QWidget): _empty_extensions = "Any file" - def __init__(self, single_item, allow_sequences, parent): + def __init__(self, single_item, allow_sequences, extensions_label, parent): super(DropEmpty, self).__init__(parent) drop_label_widget = QtWidgets.QLabel("Drag & Drop files here", self) @@ -70,7 +70,11 @@ class DropEmpty(QtWidgets.QWidget): self._update_size_timer = update_size_timer + if extensions_label and not extensions_label.startswith(" "): + extensions_label = " " + extensions_label + self._single_item = single_item + self._extensions_label = extensions_label self._allow_sequences = allow_sequences self._allowed_extensions = set() self._allow_folders = None @@ -123,24 +127,32 @@ class DropEmpty(QtWidgets.QWidget): items_label = "Single " if len(allowed_items) == 1: - allowed_items_label = allowed_items[0] + extensions_label = allowed_items[0] elif len(allowed_items) == 2: - allowed_items_label = " or ".join(allowed_items) + extensions_label = " or ".join(allowed_items) else: last_item = allowed_items.pop(-1) new_last_item = " or ".join(last_item, allowed_items.pop(-1)) allowed_items.append(new_last_item) - allowed_items_label = ", ".join(allowed_items) + extensions_label = ", ".join(allowed_items) + + allowed_items_label = extensions_label items_label += allowed_items_label + label_tooltip = None if self._allowed_extensions: items_label += " of\n{}".format( ", ".join(sorted(self._allowed_extensions)) ) + if self._extensions_label: + 
label_tooltip = items_label + items_label = self._extensions_label + if self._items_label_widget.text() == items_label: return + self._items_label_widget.setToolTip(label_tooltip) self._items_label_widget.setText(items_label) self._update_size_timer.start() @@ -618,11 +630,13 @@ class FilesView(QtWidgets.QListView): class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() - def __init__(self, single_item, allow_sequences, parent): + def __init__(self, single_item, allow_sequences, extensions_label, parent): super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) - empty_widget = DropEmpty(single_item, allow_sequences, self) + empty_widget = DropEmpty( + single_item, allow_sequences, extensions_label, self + ) files_model = FilesModel(single_item, allow_sequences) files_proxy_model = FilesProxyModel() diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index b6493b80a8..975b2df955 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -443,7 +443,10 @@ class UnknownAttrWidget(_BaseAttrDefWidget): class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( - self.attr_def.single_item, self.attr_def.allow_sequences, self + self.attr_def.single_item, + self.attr_def.allow_sequences, + self.attr_def.extensions_label, + self ) if self.attr_def.tooltip: From 1baf0457baac213c2f443d02c217bcedfb5870f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 14:35:31 +0200 Subject: [PATCH 320/785] added second file input for reviewables --- openpype/hosts/traypublisher/api/plugin.py | 38 ++++++++++++++++++---- 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 46fb4fdb51..cb02a5600e 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,4 +1,4 @@ -from openpype.lib.attribute_definitions import BoolDef, FileDef +from openpype.lib.attribute_definitions import FileDef from openpype.pipeline import ( Creator, CreatedInstance @@ -12,6 +12,29 @@ from .pipeline import ( ) +IMAGE_EXTENSIONS = [ + ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", + ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", + ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", + ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", + ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", + ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", + ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", + ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", + ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", + ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", + ".xpm", ".xwd" +] +VIDEO_EXTENSIONS = [ + ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", + ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", + ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", + ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", + ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" +] +REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS + + class TrayPublishCreator(Creator): create_allow_context_change = True host_name = "traypublisher" @@ -84,12 +107,16 @@ class SettingsCreator(TrayPublishCreator): extensions=self.extensions, allow_sequences=self.allow_sequences, single_item=not 
self.allow_multiple_items, - label="Filepath", + label="Representations", ), - BoolDef( + FileDef( "reviewable", - label="Reviewable", - default=self.reviewable + folders=False, + extensions=REVIEW_EXTENSIONS, + allow_sequences=True, + single_item=True, + label="Reviewable representations", + extensions_label="Single reviewable item" ) ] @@ -112,7 +139,6 @@ class SettingsCreator(TrayPublishCreator): "extensions": item_data["extensions"], "allow_sequences": item_data["allow_sequences"], "allow_multiple_items": item_data["allow_multiple_items"], - "reviewable": item_data["reviewable"], "default_variants": item_data["default_variants"] } ) From 8a6ee91ec2251939a9e9c8f5624765ad2c37b826 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 14:36:00 +0200 Subject: [PATCH 321/785] removed reviewable key from settings (unused) --- .../defaults/project_settings/traypublisher.json | 15 +-------------- .../schema_project_traypublisher.json | 5 ----- 2 files changed, 1 insertion(+), 19 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 619d54dbaf..80c4a6bed1 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -12,7 +12,6 @@ "detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.", "allow_sequences": false, "allow_multiple_items": false, - "reviewable": false, "extensions": [ ".ma", ".mb", @@ -47,7 +46,6 @@ "detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ", "allow_sequences": false, "allow_multiple_items": true, - "reviewable": false, "extensions": [ ".ma", ".mb", @@ -73,7 +71,6 @@ "detailed_description": "Alembic or bgeo cache of animated data", "allow_sequences": true, "allow_multiple_items": true, - "reviewable": false, "extensions": [ ".abc", ".bgeo", @@ -97,7 +94,6 @@ "detailed_description": "Any type of image seqeuence coming from outside of the studio. Usually camera footage, but could also be animatics used for reference.", "allow_sequences": true, "allow_multiple_items": true, - "reviewable": true, "extensions": [ ".exr", ".png", @@ -120,7 +116,6 @@ "detailed_description": "Sequence or single file renders", "allow_sequences": true, "allow_multiple_items": true, - "reviewable": true, "extensions": [ ".exr", ".png", @@ -144,7 +139,6 @@ "detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.", "allow_sequences": false, "allow_multiple_items": true, - "reviewable": false, "extensions": [ ".abc", ".ma", @@ -169,7 +163,6 @@ "detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. 
This is a fallback 2d family for everything that doesn't fit more specific family.", "allow_sequences": false, "allow_multiple_items": true, - "reviewable": true, "extensions": [ ".exr", ".jpg", @@ -193,7 +186,6 @@ "detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids", "allow_sequences": true, "allow_multiple_items": true, - "reviewable": false, "extensions": [ ".vdb" ] @@ -212,7 +204,6 @@ "detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data", "allow_sequences": false, "allow_multiple_items": true, - "reviewable": false, "extensions": [] }, { @@ -225,7 +216,6 @@ "detailed_description": "CG rigged character or prop. Rig should be clean of any extra data and directly loadable into it's respective application\t", "allow_sequences": false, "allow_multiple_items": false, - "reviewable": false, "extensions": [ ".ma", ".blend", @@ -238,14 +228,11 @@ "identifier": "", "label": "Simple UE texture", "icon": "fa.image", - "default_variants": [ - "" - ], + "default_variants": [], "description": "Simple Unreal Engine texture", "detailed_description": "Texture files with Unreal Engine naming conventions", "allow_sequences": false, "allow_multiple_items": true, - "reviewable": false, "extensions": [] } ] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 269b47459c..f11621c76e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -72,11 +72,6 @@ "label": "Allow multiple items", "type": "boolean" }, - { - "key": "reviewable", - "label": "Reviewable", - "type": "boolean" - }, { "type": "list", "key": "extensions", From 64b4bdaf880a8b639e921045c4c14a2d6b0cd95b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 15:19:25 +0200 Subject: [PATCH 322/785] handle review representations using reviewable file input --- .../publish/collect_simple_instances.py | 249 +++++++++++------- 1 file changed, 154 insertions(+), 95 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index e8e1c1013c..b4328d948c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -1,5 +1,4 @@ import os -import json import tempfile import clique @@ -28,28 +27,6 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): hosts = ["traypublisher"] - _image_extensions = [ - ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", - ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", - ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", - ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", - ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", - ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", - ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", - ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", - ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", - ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", - ".xpm", ".xwd" - ] - 
_video_extensions = [ - ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", - ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", - ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", - ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", - ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" - ] - _review_extensions = _image_extensions + _video_extensions - def process(self, instance): if not instance.data.get("settings_creator"): return @@ -64,97 +41,133 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): "Created temp staging directory for instance {}. {}" ).format(instance_label, tmp_folder)) - repres = instance.data["representations"] + # Store filepaths for validation of their existence + source_filepaths = [] + # Make sure there are no representations with same name + repre_names_counter = {} + # Store created names for logging + repre_names = [] + # Store set of filepaths per each representation + representation_files_mapping = [] + source = self._create_main_representations( + instance, + source_filepaths, + repre_names_counter, + repre_names, + representation_files_mapping + ) + self._create_review_representation( + instance, + source_filepaths, + repre_names_counter, + repre_names, + representation_files_mapping + ) + + instance.data["source"] = source + instance.data["sourceFilepaths"] = list(set(source_filepaths)) + + self.log.debug( + ( + "Created Simple Settings instance \"{}\"" + " with {} representations: {}" + ).format( + instance_label, + len(instance.data["representations"]), + ", ".join(repre_names) + ) + ) + + def _create_main_representations( + self, + instance, + source_filepaths, + repre_names_counter, + repre_names, + representation_files_mapping + ): creator_attributes = instance.data["creator_attributes"] - self.log.info(json.dumps(creator_attributes)) filepath_items = creator_attributes["filepath"] if not isinstance(filepath_items, list): filepath_items = [filepath_items] - # Last found representation is used as source for instance source = None - # Check if review is enabled and should be created - reviewable = creator_attributes.get("reviewable") - # Store review representation - first found that can be used for - # review is stored - review_representation = None - review_path = None - - # Make sure there are no representations with same name - repre_names_counter = {} - # Store created names for logging - _repre_names = [] - # Store filepaths for validation of their existence - source_filepaths = [] - - # Create representations for filepath_item in filepath_items: - filepaths = [ + # Skip if filepath item does not have filenames + if not filepath_item["filenames"]: + continue + + filepaths = { os.path.join(filepath_item["directory"], filename) for filename in filepath_item["filenames"] - ] + } source_filepaths.extend(filepaths) source = self._calculate_source(filepaths) - filenames = filepath_item["filenames"] - _, ext = os.path.splitext(filenames[0]) - if len(filenames) == 1: - filenames = filenames[0] - - repre_name = repre_ext = ext[1:] - if repre_name not in repre_names_counter: - repre_names_counter[repre_name] = 2 - else: - counter = repre_names_counter[repre_name] - repre_names_counter[repre_name] += 1 - repre_name = "{}_{}".format(repre_name, counter) - - _repre_names.append('"{}"'.format(repre_name)) - representation = { - "ext": repre_ext, - "name": repre_name, - "stagingDir": filepath_item["directory"], - "files": filenames, - "tags": [] - } - repres.append(representation) - - if ( - 
reviewable - and review_representation is None - and ext in self._review_extensions - ): - review_representation = representation - review_path = source - - instance.data["source"] = source - instance.data["sourceFilepaths"] = source_filepaths - - if reviewable: - self._prepare_review(instance, review_representation, review_path) - - self.log.debug(( - "Created Simple Settings instance \"{}\"" - " with {} representations: {}" - ).format(instance_label, len(repres), ", ".join(_repre_names))) - - def _calculate_source(self, filepaths): - cols, rems = clique.assemble(filepaths) - if cols: - source = cols[0].format("{head}{padding}{tail}") - elif rems: - source = rems[0] + representation = self._create_representation_data( + filepath_item, repre_names_counter, repre_names + ) + instance.data["representations"].append(representation) + representation_files_mapping.append( + (filepaths, representation, source) + ) return source - def _prepare_review(self, instance, review_representation, review_path): - if not review_representation: + def _create_review_representation( + self, + instance, + source_filepaths, + repre_names_counter, + repre_names, + representation_files_mapping + ): + # Skip review representation creation if there are no representations + # created for "main" part + # - review representation must not be created in that case so + # validation can care about it + if not representation_files_mapping: self.log.warning(( - "Didn't find any representation" - " that could be used as source for review" + "There are missing source representations." + " Creation of review representation was skipped." )) return + creator_attributes = instance.data["creator_attributes"] + review_file_item = creator_attributes["reviewable"] + filenames = review_file_item.get("filenames") + if not filenames: + self.log.debug(( + "Filepath for review is not defined." + " Skipping review representation creation." + )) + return + + filepaths = { + os.path.join(review_file_item["directory"], filename) + for filename in filenames + } + source_filepaths.extend(filepaths) + # First try to find out representation with same filepaths + # so it's not needed to create new representation just for review + review_representation = None + # Review path (only for logging) + review_path = None + for item in representation_files_mapping: + _filepaths, representation, repre_path = item + if _filepaths == filepaths: + review_representation = representation + review_path = repre_path + break + + if review_representation is None: + self.log.debug("Creating new review representation") + review_path = self._calculate_source(filepaths) + review_representation = self._create_representation_data( + review_file_item, repre_names_counter, repre_names + ) + instance.data["representations"].append(review_representation) + if "review" not in instance.data["families"]: instance.data["families"].append("review") @@ -162,3 +175,49 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): self.log.debug("Representation {} was marked for review. {}".format( review_representation["name"], review_path )) + + def _create_representation_data( + self, filepath_item, repre_names_counter, repre_names + ): + """Create new representation data based on file item. + + Args: + filepath_item (Dict[str, Any]): Item with information about + representation paths. + repre_names_counter (Dict[str, int]): Store count of representation + names. + repre_names (List[str]): All used representation names. For + logging purposes. 
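For reference, an illustrative shape of the file-item values the refactored collector reads from the creator attributes; the main key (still "filepath" here, renamed to "representation_files" in a follow-up patch) holds a list of such items, while "reviewable" holds a single optional item. Directory and file names below are made up:

creator_attributes = {
    "filepath": [  # renamed to "representation_files" in a later patch
        {
            "directory": "/shots/sh010/render",
            "filenames": ["beauty.0001.exr", "beauty.0002.exr"],
        },
    ],
    "reviewable": {
        "directory": "/shots/sh010/review",
        "filenames": ["preview.mp4"],
    },
}
# When the reviewable item resolves to the same set of file paths as one of
# the main items, the existing representation is reused and only tagged for
# review instead of creating a duplicate representation.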
+ + Returns: + Dict: Prepared base representation data. + """ + + filenames = filepath_item["filenames"] + _, ext = os.path.splitext(filenames[0]) + if len(filenames) == 1: + filenames = filenames[0] + + repre_name = repre_ext = ext[1:] + if repre_name not in repre_names_counter: + repre_names_counter[repre_name] = 2 + else: + counter = repre_names_counter[repre_name] + repre_names_counter[repre_name] += 1 + repre_name = "{}_{}".format(repre_name, counter) + repre_names.append(repre_names) + return { + "ext": repre_ext, + "name": repre_name, + "stagingDir": filepath_item["directory"], + "files": filenames, + "tags": [] + } + + def _calculate_source(self, filepaths): + cols, rems = clique.assemble(filepaths) + if cols: + source = cols[0].format("{head}{padding}{tail}") + elif rems: + source = rems[0] + return source From 1561a4790661dbfe1411b31fe58dcf77cb4bacf7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 15:19:45 +0200 Subject: [PATCH 323/785] changed key 'filepath' to 'representation_files' --- openpype/hosts/traypublisher/api/plugin.py | 2 +- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index cb02a5600e..9b9425855e 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -102,7 +102,7 @@ class SettingsCreator(TrayPublishCreator): def get_instance_attr_defs(self): return [ FileDef( - "filepath", + "representation_files", folders=False, extensions=self.extensions, allow_sequences=self.allow_sequences, diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index b4328d948c..15dac9a4c0 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -88,7 +88,7 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): representation_files_mapping ): creator_attributes = instance.data["creator_attributes"] - filepath_items = creator_attributes["filepath"] + filepath_items = creator_attributes["representation_files"] if not isinstance(filepath_items, list): filepath_items = [filepath_items] From 72e07dd0717f958f9ade887bc3aef8715d8343ea Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Jul 2022 15:22:03 +0200 Subject: [PATCH 324/785] trayp: editorial refactory code --- .../plugins/create/create_editorial.py | 306 +++++++++++------- 1 file changed, 193 insertions(+), 113 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index f373d2ac7a..d591256f8c 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -212,12 +212,12 @@ or updating already created. Publishing will create OTIO file. # Create all clip instances clip_instance_properties.update({ "fps": fps, - "parent_asset_name": asset_name + "parent_asset_name": asset_name, + "variant": instance_data["variant"] }) self._get_clip_instances( otio_timeline, clip_instance_properties, - variant_name=instance_data["variant"], family_presets=allowed_family_presets ) @@ -259,17 +259,8 @@ or updating already created. Publishing will create OTIO file. 
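A small worked example (not part of the patch) of the representation-name counter in `_create_representation_data` above: a second item with the same extension gets a numeric suffix starting at 2.

repre_names_counter = {}
names = []
for ext in ("exr", "exr", "mov"):
    repre_name = ext
    if repre_name not in repre_names_counter:
        repre_names_counter[repre_name] = 2
    else:
        counter = repre_names_counter[repre_name]
        repre_names_counter[repre_name] += 1
        repre_name = "{}_{}".format(repre_name, counter)
    names.append(repre_name)

assert names == ["exr", "exr_2", "mov"]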
self, otio_timeline, clip_instance_properties, - variant_name, family_presets ): - # get clip instance properties - parent_asset_name = clip_instance_properties["parent_asset_name"] - handle_start = clip_instance_properties["handle_start"] - handle_end = clip_instance_properties["handle_end"] - timeline_offset = clip_instance_properties["timeline_offset"] - workfile_start_frame = clip_instance_properties["workfile_start_frame"] - fps = clip_instance_properties["fps"] - self.asset_name_check = [] tracks = otio_timeline.each_child( @@ -294,118 +285,207 @@ or updating already created. Publishing will create OTIO file. if not self._validate_clip_for_processing(clip): continue - # basic unique asset name - clip_name = os.path.splitext(clip.name)[0].lower() - name = f"{parent_asset_name.split('_')[0]}_{clip_name}" - - # make sure the name is unique - self._validate_name_uniqueness(name) - - # frame ranges data - clip_in = clip.range_in_parent().start_time.value - clip_in += track_start_frame - clip_out = clip.range_in_parent().end_time_inclusive().value - clip_out += track_start_frame - self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") - - # add offset in case there is any - self.log.debug(f"__ timeline_offset: {timeline_offset}") - if timeline_offset: - clip_in += timeline_offset - clip_out += timeline_offset - - clip_duration = clip.duration().value - self.log.info(f"clip duration: {clip_duration}") - - source_in = clip.trimmed_range().start_time.value - source_out = source_in + clip_duration - - # define starting frame for future shot - frame_start = ( - clip_in if workfile_start_frame is None - else workfile_start_frame + base_instance_data = self._get_base_instance_data( + clip, + clip_instance_properties, + track_start_frame ) - frame_end = frame_start + (clip_duration - 1) - parent_instance_label = None - parent_instance_id = None + parenting_data = { + "instance_label": None, + "instance_id": None + } for _fpreset in family_presets: - # get variant name from preset or from inharitance - _variant_name = _fpreset.get("variant") or variant_name - family = _fpreset["family"] - self.log.debug(f"__ family: {family}") - self.log.debug(f"__ _fpreset: {_fpreset}") - - # subset name - subset_name = "{}{}".format( - family, _variant_name.capitalize() - ) - label = "{}_{}".format( - clip_name, - subset_name + instance = self._make_subset_instance( + _fpreset["family"], + _fpreset, + deepcopy(base_instance_data), + parenting_data ) + self.log.debug(f"{pformat(dict(instance.data))}") - # create shared new instance data - instance_data = { - "label": label, - "variant": _variant_name, - "family": family, - "subset": subset_name, + def _make_subset_instance( + self, + _fpreset, + family, + future_instance_data, + parenting_data + ): + label = self._make_subset_naming( + _fpreset["family"], + _fpreset, + future_instance_data + ) - # HACK: just for temporal bug workaround - # TODO: should loockup shot name for update - "asset": parent_asset_name, - "name": clip_name, - "task": "", + # add file extension filter only if it is not shot family + if family == "shot": + c_instance = self.create_context.creators[ + "editorial_shot"].create( + future_instance_data) + parenting_data = { + "instance_label": label, + "instance_id": c_instance.data["instance_id"] + } - # parent time properties - "trackStartFrame": track_start_frame, - "timelineOffset": timeline_offset, - - # creator_attributes - "creator_attributes": { - "asset_name": clip_name, - "workfile_start_frame": workfile_start_frame, - 
"frameStart": int(frame_start), - "frameEnd": int(frame_end), - "fps": fps, - "handle_start": int(handle_start), - "handle_end": int(handle_end), - "clipIn": int(clip_in), - "clipOut": int(clip_out), - "sourceIn": int(source_in), - "sourceOut": int(source_out), - } + else: + # add review family if defined + future_instance_data.update({ + "filterExt": _fpreset["filter_ext"], + "parent_instance_id": parenting_data["instance_id"], + "creator_attributes": { + "parent_instance": parenting_data["instance_label"] + }, + "publish_attributes": { + "CollectReviewFamily": { + "add_review_family": _fpreset.get("review") } - # add file extension filter only if it is not shot family - if family == "shot": - c_instance = self.create_context.creators[ - "editorial_shot"].create( - instance_data) - parent_instance_label = label - parent_instance_id = c_instance.data["instance_id"] - else: - # add review family if defined - instance_data.update({ - "filterExt": _fpreset["filter_ext"], - "parent_instance_id": parent_instance_id, - "creator_attributes": { - "parent_instance": parent_instance_label - }, - "publish_attributes": { - "CollectReviewFamily": { - "add_review_family": _fpreset.get("review") - } - } - }) + } + }) - creator_identifier = f"editorial_{family}" - editorial_clip_creator = self.create_context.creators[ - creator_identifier] - c_instance = editorial_clip_creator.create( - instance_data) + creator_identifier = f"editorial_{family}" + editorial_clip_creator = self.create_context.creators[ + creator_identifier] + c_instance = editorial_clip_creator.create( + future_instance_data) - self.log.debug(f"{pformat(dict(c_instance.data))}") + return c_instance + + def _make_subset_naming( + self, + family, + _fpreset, + future_instance_data + ): + shot_name = future_instance_data["shotName"] + variant_name = future_instance_data["variant"] + + # get variant name from preset or from inharitance + _variant_name = _fpreset.get("variant") or variant_name + + self.log.debug(f"__ family: {family}") + self.log.debug(f"__ _fpreset: {_fpreset}") + + # subset name + subset_name = "{}{}".format( + family, _variant_name.capitalize() + ) + label = "{}_{}".format( + shot_name, + subset_name + ) + + future_instance_data.update({ + "family": family, + "label": label, + "variant": _variant_name, + "subset": subset_name, + }) + + return label + + def _get_base_instance_data( + self, + clip, + clip_instance_properties, + track_start_frame, + ): + # get clip instance properties + parent_asset_name = clip_instance_properties["parent_asset_name"] + handle_start = clip_instance_properties["handle_start"] + handle_end = clip_instance_properties["handle_end"] + timeline_offset = clip_instance_properties["timeline_offset"] + workfile_start_frame = clip_instance_properties["workfile_start_frame"] + fps = clip_instance_properties["fps"] + variant_name = clip_instance_properties["variant"] + + shot_name = self._get_clip_name(clip, parent_asset_name) + + timing_data = self._get_timing_data( + clip, + timeline_offset, + track_start_frame, + workfile_start_frame + ) + + # create creator attributes + creator_attributes = { + "asset_name": shot_name, + "workfile_start_frame": workfile_start_frame, + "fps": fps, + "handle_start": int(handle_start), + "handle_end": int(handle_end) + } + creator_attributes.update(timing_data) + + # create shared new instance data + base_instance_data = { + "shotName": shot_name, + "variant": variant_name, + + # HACK: just for temporal bug workaround + # TODO: should loockup shot name for update + 
"asset": parent_asset_name, + "task": "", + # parent time properties + "trackStartFrame": track_start_frame, + "timelineOffset": timeline_offset, + # creator_attributes + "creator_attributes": creator_attributes + } + + return base_instance_data + + def _get_timing_data( + self, + clip, + timeline_offset, + track_start_frame, + workfile_start_frame + ): + # frame ranges data + clip_in = clip.range_in_parent().start_time.value + clip_in += track_start_frame + clip_out = clip.range_in_parent().end_time_inclusive().value + clip_out += track_start_frame + self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") + + # add offset in case there is any + self.log.debug(f"__ timeline_offset: {timeline_offset}") + if timeline_offset: + clip_in += timeline_offset + clip_out += timeline_offset + + clip_duration = clip.duration().value + self.log.info(f"clip duration: {clip_duration}") + + source_in = clip.trimmed_range().start_time.value + source_out = source_in + clip_duration + + # define starting frame for future shot + frame_start = ( + clip_in if workfile_start_frame is None + else workfile_start_frame + ) + frame_end = frame_start + (clip_duration - 1) + + return { + "frameStart": int(frame_start), + "frameEnd": int(frame_end), + "clipIn": int(clip_in), + "clipOut": int(clip_out), + "sourceIn": int(source_in), + "sourceOut": int(source_out) + } + + def _get_clip_name(self, clip, selected_asset_name): + # basic unique asset name + clip_name = os.path.splitext(clip.name)[0].lower() + name = f"{selected_asset_name.split('_')[0]}_{clip_name}" + + # make sure the name is unique + self._validate_name_uniqueness(name) + + return clip_name def _get_allowed_family_presets(self, pre_create_data): self.log.debug(f"__ pre_create_data: {pre_create_data}") From 2b1654a1e24d70c215cbfcc4e8d0922dcb663a9f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 15:22:19 +0200 Subject: [PATCH 325/785] fix variable usage --- .../traypublisher/plugins/publish/collect_simple_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 15dac9a4c0..a5b95138bd 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -205,7 +205,7 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): counter = repre_names_counter[repre_name] repre_names_counter[repre_name] += 1 repre_name = "{}_{}".format(repre_name, counter) - repre_names.append(repre_names) + repre_names.append(repre_name) return { "ext": repre_ext, "name": repre_name, From a5477a15c80e5cbdd3a29541beed2c0e2eb03f8d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Jul 2022 15:39:02 +0200 Subject: [PATCH 326/785] trayp: debugging after refactory --- .../plugins/create/create_editorial.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index d591256f8c..1ff729cf65 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -295,9 +295,11 @@ or updating already created. Publishing will create OTIO file. 
"instance_label": None, "instance_id": None } + self.log.info( + f"Creating subsets from presets: \n{pformat(family_presets)}") + for _fpreset in family_presets: instance = self._make_subset_instance( - _fpreset["family"], _fpreset, deepcopy(base_instance_data), parenting_data @@ -307,12 +309,11 @@ or updating already created. Publishing will create OTIO file. def _make_subset_instance( self, _fpreset, - family, future_instance_data, parenting_data ): + family = _fpreset["family"] label = self._make_subset_naming( - _fpreset["family"], _fpreset, future_instance_data ) @@ -322,10 +323,10 @@ or updating already created. Publishing will create OTIO file. c_instance = self.create_context.creators[ "editorial_shot"].create( future_instance_data) - parenting_data = { + parenting_data.update({ "instance_label": label, "instance_id": c_instance.data["instance_id"] - } + }) else: # add review family if defined @@ -352,12 +353,12 @@ or updating already created. Publishing will create OTIO file. def _make_subset_naming( self, - family, _fpreset, future_instance_data ): shot_name = future_instance_data["shotName"] variant_name = future_instance_data["variant"] + family = _fpreset["family"] # get variant name from preset or from inharitance _variant_name = _fpreset.get("variant") or variant_name From 97492867cbcbc528499dc7c0c4490fafa4e945c3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 16:11:01 +0200 Subject: [PATCH 327/785] modified docstring --- .../plugins/publish/collect_simple_instances.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index a5b95138bd..c0ae694c3c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -8,15 +8,20 @@ import pyblish.api class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): """Collect data for instances created by settings creators. - Plugin create representations based on 'filepath' attribute stored - on instance. + Plugin create representations for simple instances based + on 'representation_files' attribute stored on instance data. + + There is also possibility to have reviewable representation which can be + stored under 'reviewable' attribute stored on instance data. If there was + already created representation with the same files as 'revieable' containes Representations can be marked for review and in that case is also added 'review' family to instance families. For review can be marked only one representation so **first** representation that has extension available in '_review_extensions' is used for review. - For 'source' on instance is used path from last created representation. + For instance 'source' is used path from last representation created + from 'representation_files'. Set staging directory on instance. That is probably never used because each created representation has it's own staging dir. 
From c3649e0a571048cb0660656ce9dcf10176399b5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 14 Jul 2022 16:31:32 +0200 Subject: [PATCH 328/785] :bug: fix rfm api context for getting displays and multipart flag --- openpype/hosts/maya/api/lib_renderproducts.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 2d3bda5245..a8337ccf4d 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1087,7 +1087,7 @@ class RenderProductsRenderman(ARenderProducts): "d_tiff": "tif" } - displays = get_displays()["displays"] + displays = get_displays(override_dst="render")["displays"] for name, display in displays.items(): enabled = display["params"]["enable"]["value"] if not enabled: @@ -1106,9 +1106,16 @@ class RenderProductsRenderman(ARenderProducts): display["driverNode"]["type"], "exr") for camera in cameras: - product = RenderProduct(productName=aov_name, - ext=extensions, - camera=camera) + # Create render product and set it as multipart only on + # display types supporting it. In all other cases, Renderman + # will create separate output per channel. + product = RenderProduct( + productName=aov_name, + ext=extensions, + camera=camera, + multipart=display["driverNode"]["type"] in ["d_openexr", "d_deepexr", "d_tiff"] # noqa + ) + products.append(product) return products From fbd239299f70105486db3276c218adfcccdf1e82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 14 Jul 2022 17:00:26 +0200 Subject: [PATCH 329/785] :recycle: comment on non-multipart code and raise exception --- openpype/hosts/maya/api/lib_renderproducts.py | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index a8337ccf4d..0bc8682290 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1109,12 +1109,28 @@ class RenderProductsRenderman(ARenderProducts): # Create render product and set it as multipart only on # display types supporting it. In all other cases, Renderman # will create separate output per channel. - product = RenderProduct( - productName=aov_name, - ext=extensions, - camera=camera, - multipart=display["driverNode"]["type"] in ["d_openexr", "d_deepexr", "d_tiff"] # noqa - ) + if display["driverNode"]["type"] in ["d_openexr", "d_deepexr", "d_tiff"]: # noqa + product = RenderProduct( + productName=aov_name, + ext=extensions, + camera=camera, + multipart=True + ) + else: + # this code should handle the case where no multipart + # capable format is selected. But since it involves + # shady logic to determine what channel become what + # lets not do that as all productions will use exr anyway. + """ + for channel in display['params']['displayChannels']['value']: # noqa + product = RenderProduct( + productName="{}_{}".format(aov_name, channel), + ext=extensions, + camera=camera, + multipart=False + ) + """ + raise UnsupportedImageFormatException("Only exr, deep exr and tiff formats are supported.") products.append(product) @@ -1208,3 +1224,7 @@ class UnsupportedRendererException(Exception): Raised when requesting data from unsupported renderer. 
""" + + +class UnsupportedImageFormatException(Exception): + """Custom exception to report unsupported output image format.""" From ef5b571ea6dbfae4df12a18f8412a4e90a88cf1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 14 Jul 2022 17:17:25 +0200 Subject: [PATCH 330/785] :dog: hound fix --- openpype/hosts/maya/api/lib_renderproducts.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 0bc8682290..123b934428 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1130,7 +1130,8 @@ class RenderProductsRenderman(ARenderProducts): multipart=False ) """ - raise UnsupportedImageFormatException("Only exr, deep exr and tiff formats are supported.") + raise UnsupportedImageFormatException( + "Only exr, deep exr and tiff formats are supported.") products.append(product) From f346fb8cfd3b59d85b961cba5026cd8e0f66e21a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 17:38:01 +0200 Subject: [PATCH 331/785] implemented helper function for loading of internal data --- .../widgets/attribute_defs/files_widget.py | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 508da4893b..9b4b1d6dc7 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -27,6 +27,20 @@ IS_SEQUENCE_ROLE = QtCore.Qt.UserRole + 7 EXT_ROLE = QtCore.Qt.UserRole + 8 +def convert_bytes_to_json(bytes_value): + if isinstance(bytes_value, QtCore.QByteArray): + # Raw data are already QByteArray and we don't have to load them + encoded_data = bytes_value + else: + encoded_data = QtCore.QByteArray.fromRawData(bytes_value) + stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly) + text = stream.readQString() + try: + return json.loads(text) + except Exception: + return None + + class SupportLabel(QtWidgets.QLabel): pass @@ -300,17 +314,10 @@ class FilesModel(QtGui.QStandardItemModel): return mime_data def dropMimeData(self, mime_data, action, row, col, index): - internal_move_data = mime_data.data("files_widget/internal_move") - if isinstance(internal_move_data, QtCore.QByteArray): - # Raw data are already QByteArrat and we don't have to load them - encoded_data = internal_move_data - else: - encoded_data = QtCore.QByteArray.fromRawData(internal_move_data) - stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly) - text = stream.readQString() - try: - item_ids = json.loads(text) - except Exception: + item_ids = convert_bytes_to_json( + mime_data.data("files_widget/internal_move") + ) + if item_ids is None: return False # Find matching item after which will be items moved From 8bfcbad8eb6f741e2d38203e63af3e9c53a1e26f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Jul 2022 17:39:33 +0200 Subject: [PATCH 332/785] trayp: wip hierarchical data and rename --- openpype/hosts/traypublisher/api/editorial.py | 178 ++++++++++++++++++ .../plugins/create/create_editorial.py | 34 ++-- .../schema_project_traypublisher.json | 3 +- 3 files changed, 201 insertions(+), 14 deletions(-) create mode 100644 openpype/hosts/traypublisher/api/editorial.py diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py new file mode 100644 index 0000000000..4637d6d1df --- /dev/null 
+++ b/openpype/hosts/traypublisher/api/editorial.py @@ -0,0 +1,178 @@ +import re +from copy import deepcopy + +from openpype.client import get_asset_by_id + + +class ShotMetadataSover: + """Collecting hierarchy context from `parents` and `hierarchy` data + present in `clip` family instances coming from the request json data file + + It will add `hierarchical_context` into each instance for integrate + plugins to be able to create needed parents for the context if they + don't exist yet + """ + # presets + clip_name_tokenizer = None + shot_rename = True + shot_hierarchy = None + shot_add_tasks = None + + def __init__(self, creator_settings): + self.clip_name_tokenizer = creator_settings["clip_name_tokenizer"] + self.shot_rename = creator_settings["shot_rename"] + self.shot_hierarchy = creator_settings["shot_hierarchy"] + self.shot_add_tasks = creator_settings["shot_add_tasks"] + + def convert_to_entity(self, key, value): + # ftrack compatible entity types + types = {"shot": "Shot", + "folder": "Folder", + "episode": "Episode", + "sequence": "Sequence", + "track": "Sequence", + } + # convert to entity type + entity_type = types.get(key, None) + + # return if any + if entity_type: + return {"entity_type": entity_type, "entity_name": value} + + def _rename_template(self, clip_name, source_data): + if self.clip_name_tokenizer: + search_text = "" + parent_name = source_data["assetEntity"]["name"] + + search_text += parent_name + clip_name + source_data["anatomy_data"].update({"clip_name": clip_name}) + for type, pattern in self.clip_name_tokenizer.items(): + p = re.compile(pattern) + match = p.findall(search_text) + if not match: + continue + source_data["anatomy_data"][type] = match[-1] + + # format to new shot name + return self.shot_rename[ + "shot_rename_template"].format( + **source_data["anatomy_data"]) + + def _create_hierarchy(self, source_data): + asset_doc = source_data["selected_asset_doc"] + project_doc = source_data["project_doc"] + + project_name = project_doc["name"] + visual_hierarchy = [asset_doc] + current_doc = asset_doc + + # TODO: refactory withou the while + while True: + visual_parent_id = current_doc["data"]["visualParent"] + visual_parent = None + if visual_parent_id: + visual_parent = get_asset_by_id(project_name, visual_parent_id) + + if not visual_parent: + visual_hierarchy.append(project_doc) + break + visual_hierarchy.append(visual_parent) + current_doc = visual_parent + + # add current selection context hierarchy from standalonepublisher + parents = [] + parents.extend( + { + "entity_type": entity["data"]["entityType"], + "entity_name": entity["name"] + } + for entity in reversed(visual_hierarchy) + ) + + _hierarchy = [] + if self.shot_hierarchy.get("enabled"): + parent_template_patern = re.compile(r"\{([a-z]*?)\}") + # fill the parents parts from presets + shot_hierarchy = deepcopy(self.shot_hierarchy) + hierarchy_parents = shot_hierarchy["parents"] + + # fill parent keys data template from anatomy data + for parent_key in hierarchy_parents: + hierarchy_parents[parent_key] = hierarchy_parents[ + parent_key].format(**source_data["anatomy_data"]) + + for _index, _parent in enumerate( + shot_hierarchy["parents_path"].split("/")): + parent_filled = _parent.format(**hierarchy_parents) + parent_key = parent_template_patern.findall(_parent).pop() + + # in case SP context is set to the same folder + if (_index == 0) and ("folder" in parent_key) \ + and (parents[-1]["entity_name"] == parent_filled): + self.log.debug(f" skipping : {parent_filled}") + continue + + # in case 
first parent is project then start parents from start + if (_index == 0) and ("project" in parent_key): + self.log.debug("rebuilding parents from scratch") + project_parent = parents[0] + parents = [project_parent] + self.log.debug(f"project_parent: {project_parent}") + self.log.debug(f"parents: {parents}") + continue + + prnt = self.convert_to_entity( + parent_key, parent_filled) + parents.append(prnt) + _hierarchy.append(parent_filled) + + # convert hierarchy to string + hierarchy_path = "/".join(_hierarchy) + + output_data = { + "hierarchy": hierarchy_path, + "parents": parents + } + # print + self.log.debug(f"__ hierarchy_path: {hierarchy_path}") + self.log.debug(f"__ parents: {parents}") + + output_data["tasks"] = self._generate_tasks_from_settings(project_doc) + + return output_data + + def _generate_tasks_from_settings(self, project_doc): + tasks_to_add = {} + if self.shot_add_tasks: + project_tasks = project_doc["config"]["tasks"] + for task_name, task_data in self.shot_add_tasks.items(): + _task_data = deepcopy(task_data) + + # check if task type in project task types + if _task_data["type"] in project_tasks.keys(): + tasks_to_add[task_name] = _task_data + else: + raise KeyError( + "Missing task type `{}` for `{}` is not" + " existing in `{}``".format( + _task_data["type"], + task_name, + list(project_tasks.keys()) + ) + ) + + return tasks_to_add + + def generate_data(self, clip_name, source_data): + self.log.info(f"_ source_data: {source_data}") + + # match clip to shot name at start + shot_name = clip_name + + if self.shot_rename["enabled"]: + shot_name = self._rename_template(clip_name, source_data) + self.log.info(f"Renamed shot name: {shot_name}") + + hierarchy_data = self._create_hierarchy(source_data) + + return shot_name, hierarchy_data diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 1ff729cf65..7672bb6222 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -2,12 +2,17 @@ import os from copy import deepcopy from pprint import pformat import opentimelineio as otio -from openpype.client import get_asset_by_name +from openpype.client import ( + get_asset_by_name, + get_project +) from openpype.hosts.traypublisher.api.plugin import ( TrayPublishCreator, InvisibleTrayPublishCreator ) - +from openpype.hosts.traypublisher.api.editorial import ( + ShotMetadataSover +) from openpype.pipeline import CreatedInstance @@ -173,6 +178,7 @@ or updating already created. Publishing will create OTIO file. ) # get this creator settings by identifier self._creator_settings = editorial_creators.get(self.identifier) + self._shot_metadata_solver = ShotMetadataSover(self._creator_settings) # try to set main attributes from settings if self._creator_settings.get("default_variants"): @@ -399,7 +405,19 @@ or updating already created. Publishing will create OTIO file. 
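A worked example (illustrative numbers only) of the frame-range arithmetic that `_get_timing_data` performs for each clip:

# A clip placed at timeline frame 100, 48 frames long, on a track that
# starts at frame 0, with the workfile asked to start at frame 1001.
track_start_frame = 0
workfile_start_frame = 1001

clip_in = 100 + track_start_frame              # 100
clip_duration = 48
clip_out = clip_in + clip_duration - 1         # 147 (inclusive end)

# frame_start falls back to clip_in when no workfile start frame is set
frame_start = (
    clip_in if workfile_start_frame is None else workfile_start_frame
)                                              # 1001
frame_end = frame_start + (clip_duration - 1)  # 1048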
fps = clip_instance_properties["fps"] variant_name = clip_instance_properties["variant"] - shot_name = self._get_clip_name(clip, parent_asset_name) + # basic unique asset name + clip_name = os.path.splitext(clip.name)[0].lower() + + shot_name, shot_metadata = self._shot_metadata_solver.generate_data( + clip_name, + { + "anatomy_data": anatomy_data, + "selected_asset_doc": get_asset_by_name(parent_asset_name), + "project_doc": get_project(self.project_name) + } + ) + + self._validate_name_uniqueness(shot_name) timing_data = self._get_timing_data( clip, @@ -478,16 +496,6 @@ or updating already created. Publishing will create OTIO file. "sourceOut": int(source_out) } - def _get_clip_name(self, clip, selected_asset_name): - # basic unique asset name - clip_name = os.path.splitext(clip.name)[0].lower() - name = f"{selected_asset_name.split('_')[0]}_{clip_name}" - - # make sure the name is unique - self._validate_name_uniqueness(name) - - return clip_name - def _get_allowed_family_presets(self, pre_create_data): self.log.debug(f"__ pre_create_data: {pre_create_data}") return [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 8d95cb19a9..3af3839c6f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -207,7 +207,8 @@ { "type": "task-types-enum", "key": "type", - "label": "Task type" + "label": "Task type", + "multiselection": false } ] } From ef515039a0a356f2ad5571f20d20976ae799563a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 17:40:01 +0200 Subject: [PATCH 333/785] added helper method for conversion of data to bytes --- openpype/widgets/attribute_defs/files_widget.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 9b4b1d6dc7..26398020ed 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -41,6 +41,13 @@ def convert_bytes_to_json(bytes_value): return None +def convert_data_to_bytes(data): + bytes_value = QtCore.QByteArray() + stream = QtCore.QDataStream(bytes_value, QtCore.QIODevice.WriteOnly) + stream.writeQString(json.dumps(data)) + return bytes_value + + class SupportLabel(QtWidgets.QLabel): pass @@ -306,11 +313,10 @@ class FilesModel(QtGui.QStandardItemModel): index.data(ITEM_ID_ROLE) for index in indexes ] - encoded_data = QtCore.QByteArray() - stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.WriteOnly) - stream.writeQString(json.dumps(item_ids)) - mime_data = super(FilesModel, self).mimeData(indexes) - mime_data.setData("files_widget/internal_move", encoded_data) + + item_ids_data = convert_data_to_bytes(item_ids) + mime_data = QtCore.QMimeData() + mime_data.setData("files_widget/internal_move", item_ids_data) return mime_data def dropMimeData(self, mime_data, action, row, col, index): From 53dd6cf11159b7732ce81ff05a533dd7e4affaa0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 17:45:39 +0200 Subject: [PATCH 334/785] it is possible to move file items across widgets --- .../widgets/attribute_defs/files_widget.py | 65 ++++++++++++++++++- 1 file changed, 64 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py 
b/openpype/widgets/attribute_defs/files_widget.py index 26398020ed..98f1d2738a 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -225,6 +225,7 @@ class FilesModel(QtGui.QStandardItemModel): def __init__(self, single_item, allow_sequences): super(FilesModel, self).__init__() + self._id = str(uuid.uuid4()) self._single_item = single_item self._multivalue = False self._allow_sequences = allow_sequences @@ -234,6 +235,10 @@ class FilesModel(QtGui.QStandardItemModel): self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) + @property + def id(self): + return self._id + def set_multivalue(self, multivalue): """Disable filtering.""" @@ -315,8 +320,20 @@ class FilesModel(QtGui.QStandardItemModel): ] item_ids_data = convert_data_to_bytes(item_ids) - mime_data = QtCore.QMimeData() + mime_data = super(FilesModel, self).mimeData(indexes) mime_data.setData("files_widget/internal_move", item_ids_data) + + file_items = [] + for item_id in item_ids: + file_item = self.get_file_item_by_id(item_id) + if file_item: + file_items.append(file_item.to_dict()) + + full_item_data = convert_data_to_bytes({ + "items": file_items, + "id": self._id + }) + mime_data.setData("files_widget/full_data", full_item_data) return mime_data def dropMimeData(self, mime_data, action, row, col, index): @@ -858,6 +875,11 @@ class FilesWidget(QtWidgets.QFrame): event.setDropAction(QtCore.Qt.CopyAction) event.accept() + full_data_value = mime_data.data("files_widget/full_data") + if self._handle_full_data_drag(full_data_value): + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + def dragLeaveEvent(self, event): event.accept() @@ -868,6 +890,7 @@ class FilesWidget(QtWidgets.QFrame): mime_data = event.mimeData() if mime_data.hasUrls(): event.accept() + # event.setDropAction(QtCore.Qt.CopyAction) filepaths = [] for url in mime_data.urls(): filepath = url.toLocalFile() @@ -879,8 +902,48 @@ class FilesWidget(QtWidgets.QFrame): if filepaths: self._add_filepaths(filepaths) + if self._handle_full_data_drop( + mime_data.data("files_widget/full_data") + ): + event.accept() + event.setDropAction(QtCore.Qt.CopyAction) + + # print(self._files_model.id, event) super(FilesWidget, self).dropEvent(event) + def _handle_full_data_drag(self, value): + if value is None: + return False + + full_data = convert_bytes_to_json(value) + if full_data is None: + return False + + if full_data["id"] == self._files_model.id: + return False + return True + + def _handle_full_data_drop(self, value): + if value is None: + return False + + full_data = convert_bytes_to_json(value) + if full_data is None: + return False + + if full_data["id"] == self._files_model.id: + return False + + for item in full_data["items"]: + filepaths = [ + os.path.join(item["directory"], filename) + for filename in item["filenames"] + ] + filepaths = self._files_proxy_model.filter_valid_files(filepaths) + if filepaths: + self._add_filepaths(filepaths) + return True + def _add_filepaths(self, filepaths): self._files_model.add_filepaths(filepaths) self._update_visibility() From 9a83b83bb53837ffc84fb72f347a5c8875d9f6c1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 17:50:24 +0200 Subject: [PATCH 335/785] it is possible to copy file items --- openpype/widgets/attribute_defs/files_widget.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py 
b/openpype/widgets/attribute_defs/files_widget.py index 98f1d2738a..d29aa1b607 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -905,10 +905,9 @@ class FilesWidget(QtWidgets.QFrame): if self._handle_full_data_drop( mime_data.data("files_widget/full_data") ): - event.accept() event.setDropAction(QtCore.Qt.CopyAction) + event.accept() - # print(self._files_model.id, event) super(FilesWidget, self).dropEvent(event) def _handle_full_data_drag(self, value): @@ -942,8 +941,19 @@ class FilesWidget(QtWidgets.QFrame): filepaths = self._files_proxy_model.filter_valid_files(filepaths) if filepaths: self._add_filepaths(filepaths) + + if self._copy_modifiers_enabled(): + return False return True + def _copy_modifiers_enabled(self): + if ( + QtWidgets.QApplication.keyboardModifiers() + & QtCore.Qt.ControlModifier + ): + return True + return False + def _add_filepaths(self, filepaths): self._files_model.add_filepaths(filepaths) self._update_visibility() From e0fe2e84b5225a47e9c64472a91d74691d46f5dc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Jul 2022 18:22:48 +0200 Subject: [PATCH 336/785] remove unnecessary line edit --- openpype/widgets/attribute_defs/widgets.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index e4c4aba170..d0ba8814c7 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -379,11 +379,6 @@ class EnumAttrWidget(_BaseAttrDefWidget): combo_delegate = QtWidgets.QStyledItemDelegate(input_widget) input_widget.setItemDelegate(combo_delegate) - line_edit = QtWidgets.QLineEdit(input_widget) - line_edit.setReadOnly(True) - line_edit.setAttribute(QtCore.Qt.WA_TransparentForMouseEvents) - input_widget.setLineEdit(line_edit) - if self.attr_def.tooltip: input_widget.setToolTip(self.attr_def.tooltip) From bf82969c1422cfc87379b75d859cb93e5cf983c6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 09:38:51 +0200 Subject: [PATCH 337/785] trayp: shot metadata solver final --- openpype/hosts/traypublisher/api/editorial.py | 240 ++++++++++-------- 1 file changed, 133 insertions(+), 107 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index 4637d6d1df..d6cc99f87c 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -12,6 +12,9 @@ class ShotMetadataSover: plugins to be able to create needed parents for the context if they don't exist yet """ + + NO_DECOR_PATERN = re.compile(r"\{([a-z]*?)\}") + # presets clip_name_tokenizer = None shot_rename = True @@ -24,49 +27,106 @@ class ShotMetadataSover: self.shot_hierarchy = creator_settings["shot_hierarchy"] self.shot_add_tasks = creator_settings["shot_add_tasks"] - def convert_to_entity(self, key, value): - # ftrack compatible entity types - types = {"shot": "Shot", - "folder": "Folder", - "episode": "Episode", - "sequence": "Sequence", - "track": "Sequence", - } - # convert to entity type - entity_type = types.get(key, None) + def _rename_template(self, data): + # format to new shot name + return self.shot_rename[ + "shot_rename_template"].format(**data) - # return if any - if entity_type: - return {"entity_type": entity_type, "entity_name": value} + def _generate_tokens(self, clip_name, source_data): + output_data = deepcopy(source_data["anatomy_data"]) + output_data["clip_name"] = clip_name 
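A standalone sketch of what the `clip_name_tokenizer` settings and the shot rename template do (token names, patterns and the clip name below are illustrative, not project defaults): each named regex is searched in the parent asset name plus the clip name, the last match becomes a token, and the tokens fill the rename template.

import re

clip_name_tokenizer = {"_sequence_": r"sc\d{3}", "_shot_": r"sh\d{3}"}
shot_rename_template = "{_sequence_}_{_shot_}"

parent_name = "ep01"
clip_name = "sc010_sh020_plate"
search_text = parent_name + clip_name

tokens = {"clip_name": clip_name}
for token_key, pattern in clip_name_tokenizer.items():
    matches = re.findall(pattern, search_text)
    if matches:
        # the solver keeps the last match found in the search text
        tokens[token_key] = matches[-1]

shot_name = shot_rename_template.format(**tokens)
assert shot_name == "sc010_sh020"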
- def _rename_template(self, clip_name, source_data): - if self.clip_name_tokenizer: - search_text = "" - parent_name = source_data["assetEntity"]["name"] + if not self.clip_name_tokenizer: + return output_data - search_text += parent_name + clip_name - source_data["anatomy_data"].update({"clip_name": clip_name}) - for type, pattern in self.clip_name_tokenizer.items(): - p = re.compile(pattern) - match = p.findall(search_text) - if not match: - continue - source_data["anatomy_data"][type] = match[-1] + parent_name = source_data["selected_asset_doc"]["name"] - # format to new shot name - return self.shot_rename[ - "shot_rename_template"].format( - **source_data["anatomy_data"]) + search_text = parent_name + clip_name - def _create_hierarchy(self, source_data): - asset_doc = source_data["selected_asset_doc"] - project_doc = source_data["project_doc"] + for token_key, pattern in self.clip_name_tokenizer.items(): + p = re.compile(pattern) + match = p.findall(search_text) + if not match: + continue + # QUESTION:how to refactory `match[-1]` to some better way? + output_data[token_key] = match[-1] + return output_data + + def _create_parents_from_settings(self, parents, data): + + # fill the parents parts from presets + shot_hierarchy = deepcopy(self.shot_hierarchy) + hierarchy_parents = shot_hierarchy["parents"] + + # fill parent keys data template from anatomy data + _parent_tokens_formating_data = { + parent_token["name"]: parent_token["value"].format(**data) + for parent_token in hierarchy_parents + } + _parent_tokens_type = { + parent_token["name"]: parent_token["type"] + for parent_token in hierarchy_parents + } + for _index, _parent in enumerate( + shot_hierarchy["parents_path"].split("/") + ): + # format parent token with value which is formated + parent_name = _parent.format( + **_parent_tokens_formating_data) + parent_token_name = ( + self.NO_DECOR_PATERN.findall(_parent).pop()) + + if not parent_token_name: + raise KeyError( + f"Parent token is not found in: `{_parent}`") + + # find parent type + parent_token_type = _parent_tokens_type[parent_token_name] + + # in case selected context is set to the same asset + if ( + _index == 0 + and parents[-1]["entity_name"] == parent_name + ): + self.log.debug(f" skipping : {parent_name}") + continue + + # in case first parent is project then start parents from start + if ( + _index == 0 + and parent_token_type == "project" + ): + self.log.debug("rebuilding parents from scratch") + project_parent = parents[0] + parents = [project_parent] + continue + + parents.append({ + "entity_type": parent_token_type, + "entity_name": parent_name + }) + + self.log.debug(f"__ parents: {parents}") + + return parents + + def _create_hierarchy_path(self, parents): + return "/".join( + [p for p in parents if p["entity_type"] != "project"] + ) if parents else "" + + def _get_parents_from_selected_asset( + self, + asset_doc, + project_doc + ): project_name = project_doc["name"] visual_hierarchy = [asset_doc] current_doc = asset_doc - # TODO: refactory withou the while + # looping trought all available visual parents + # if they are not available anymore than it breaks while True: visual_parent_id = current_doc["data"]["visualParent"] visual_parent = None @@ -79,100 +139,66 @@ class ShotMetadataSover: visual_hierarchy.append(visual_parent) current_doc = visual_parent - # add current selection context hierarchy from standalonepublisher - parents = [] - parents.extend( + # add current selection context hierarchy + return [ { "entity_type": entity["data"]["entityType"], 
"entity_name": entity["name"] } for entity in reversed(visual_hierarchy) - ) - - _hierarchy = [] - if self.shot_hierarchy.get("enabled"): - parent_template_patern = re.compile(r"\{([a-z]*?)\}") - # fill the parents parts from presets - shot_hierarchy = deepcopy(self.shot_hierarchy) - hierarchy_parents = shot_hierarchy["parents"] - - # fill parent keys data template from anatomy data - for parent_key in hierarchy_parents: - hierarchy_parents[parent_key] = hierarchy_parents[ - parent_key].format(**source_data["anatomy_data"]) - - for _index, _parent in enumerate( - shot_hierarchy["parents_path"].split("/")): - parent_filled = _parent.format(**hierarchy_parents) - parent_key = parent_template_patern.findall(_parent).pop() - - # in case SP context is set to the same folder - if (_index == 0) and ("folder" in parent_key) \ - and (parents[-1]["entity_name"] == parent_filled): - self.log.debug(f" skipping : {parent_filled}") - continue - - # in case first parent is project then start parents from start - if (_index == 0) and ("project" in parent_key): - self.log.debug("rebuilding parents from scratch") - project_parent = parents[0] - parents = [project_parent] - self.log.debug(f"project_parent: {project_parent}") - self.log.debug(f"parents: {parents}") - continue - - prnt = self.convert_to_entity( - parent_key, parent_filled) - parents.append(prnt) - _hierarchy.append(parent_filled) - - # convert hierarchy to string - hierarchy_path = "/".join(_hierarchy) - - output_data = { - "hierarchy": hierarchy_path, - "parents": parents - } - # print - self.log.debug(f"__ hierarchy_path: {hierarchy_path}") - self.log.debug(f"__ parents: {parents}") - - output_data["tasks"] = self._generate_tasks_from_settings(project_doc) - - return output_data + ] def _generate_tasks_from_settings(self, project_doc): tasks_to_add = {} - if self.shot_add_tasks: - project_tasks = project_doc["config"]["tasks"] - for task_name, task_data in self.shot_add_tasks.items(): - _task_data = deepcopy(task_data) - # check if task type in project task types - if _task_data["type"] in project_tasks.keys(): - tasks_to_add[task_name] = _task_data - else: - raise KeyError( - "Missing task type `{}` for `{}` is not" - " existing in `{}``".format( - _task_data["type"], - task_name, - list(project_tasks.keys()) - ) + project_tasks = project_doc["config"]["tasks"] + for task_name, task_data in self.shot_add_tasks.items(): + _task_data = deepcopy(task_data) + + # check if task type in project task types + if _task_data["type"] in project_tasks.keys(): + tasks_to_add[task_name] = _task_data + else: + raise KeyError( + "Missing task type `{}` for `{}` is not" + " existing in `{}``".format( + _task_data["type"], + task_name, + list(project_tasks.keys()) ) + ) return tasks_to_add def generate_data(self, clip_name, source_data): self.log.info(f"_ source_data: {source_data}") + tasks = {} + asset_doc = source_data["selected_asset_doc"] + project_doc = source_data["project_doc"] + # match clip to shot name at start shot_name = clip_name + # parse all tokens and generate formating data + formating_data = self._generate_tokens(shot_name, source_data) + + # generate parents from selected asset + parents = self._get_parents_from_selected_asset(asset_doc, project_doc) + if self.shot_rename["enabled"]: - shot_name = self._rename_template(clip_name, source_data) + shot_name = self._rename_template(clip_name, formating_data) self.log.info(f"Renamed shot name: {shot_name}") - hierarchy_data = self._create_hierarchy(source_data) + if 
self.shot_hierarchy["enabled"]: + parents = self._create_parents_from_settings(formating_data) - return shot_name, hierarchy_data + if self.shot_add_tasks: + tasks = self._generate_tasks_from_settings( + project_doc) + + return shot_name, { + "hierarchy": self._create_hierarchy_path(parents), + "parents": parents, + "tasks": tasks + } From a6c029d9fb1462054ca74817393beaebf32c7346 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 10:13:32 +0200 Subject: [PATCH 338/785] skip validation of zou id --- openpype/modules/kitsu/utils/sync_service.py | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 3848eda7ae..441b95a7ec 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -4,7 +4,8 @@ import gazu from openpype.client import ( get_project, - get_assets + get_assets, + get_asset_by_name ) from openpype.pipeline import AvalonMongoDB from .credentials import validate_credentials @@ -350,18 +351,8 @@ class Listener: # Find asset doc parent_name = task["entity"]["name"] - parent_zou_id = task["entity"]["id"] - asset_docs = get_assets( - project_name, - asset_names=[parent_name], - fields=["_id", "data.zou.id", "data.tasks"] - ) - asset_doc = None - for _asset_doc in asset_docs: - doc_zou_id = _asset_doc.get("data", {}).get("zou", {}).get("id") - if doc_zou_id == parent_zou_id: - asset_doc = _asset_doc - break + + asset_doc = get_asset_by_name(project_name, parent_name) # Update asset tasks with new one asset_tasks = asset_doc["data"].get("tasks") From c210c93b325aaef0aac5f90fddbe5253a4702166 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 10:15:32 +0200 Subject: [PATCH 339/785] changes from comments --- openpype/modules/kitsu/utils/update_op_with_zou.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index a68d6d31c3..4695a49159 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -39,14 +39,14 @@ def create_op_asset(gazu_entity: dict) -> dict: } -def get_kitsu_project_name(project_id: str)->str: +def get_kitsu_project_name(project_id: str) -> str: """Get project name based on project id in kitsu. Args: - project_id (str): Id of project in Kitsu. + project_id (str): UUID of project in Kitsu. Returns: - str: Project name which has project in Kitsu. + str: Name of Kitsu project. 
""" project = gazu.project.get_project(project_id) @@ -178,7 +178,7 @@ def update_op_assets( asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None ) if visual_parent_doc_id is None: - # Find root folder doc + # Find root folder docs root_folder_docs = get_assets( project_name, asset_name=[entity_parent_folders[-1]], From baa1256b380630b0d23f4a57ed34431e947d4c85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 11:22:22 +0200 Subject: [PATCH 340/785] added settings to define when cycle review session creation happens --- .../defaults/project_settings/ftrack.json | 5 ++++ .../schema_project_ftrack.json | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 831c34835e..b102b340be 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -124,6 +124,11 @@ "Project Manager" ], "cycle_enabled": false, + "cycle_hour_start": [ + 0, + 0, + 0 + ], "review_session_template": "{yy}{mm}{dd}" } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index c0069dcdab..4119184ca9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -412,6 +412,31 @@ "key": "cycle_enabled", "label": "Create daily review session" }, + { + "type": "list-strict", + "key": "cycle_hour_start", + "label": "Create daily review session at", + "tooltip": "This may take affect on next day", + "object_types": [ + { + "label": "HMS", + "type": "number", + "minimum": 0, + "maximum": 23, + "decimal": 0 + }, { + "type": "number", + "minimum": 0, + "maximum": 59, + "decimal": 0 + }, { + "type": "number", + "minimum": 0, + "maximum": 59, + "decimal": 0 + } + ] + }, { "type": "separator" }, From beb53f7ccb513cd37a54a19b366ecccf120b8605 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 11:23:07 +0200 Subject: [PATCH 341/785] use time settings to determine when to trigger creation --- .../action_create_review_session.py | 63 ++++++++++++------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py b/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py index 8a8e86e7b9..68d498a83a 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py +++ b/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py @@ -6,7 +6,10 @@ import collections import ftrack_api from openpype.lib import get_datetime_data -from openpype.api import get_project_settings +from openpype.settings.lib import ( + get_project_settings, + get_default_project_settings +) from openpype_modules.ftrack.lib import ServerAction @@ -79,6 +82,35 @@ class CreateDailyReviewSessionServerAction(ServerAction): ) return True + def _calculate_next_cycle_delta(self): + studio_default_settings = get_default_project_settings() + action_settings = ( + studio_default_settings + ["ftrack"] + [self.settings_frack_subkey] + [self.settings_key] + ) + cycle_hour_start = action_settings.get("cycle_hour_start") + if not cycle_hour_start: + h = m = s = 0 + else: + h, m, s = cycle_hour_start + + # Create threading timer which will trigger creation of report 
+ # at the 00:00:01 of next day + # - callback will trigger another timer which will have 1 day offset + now = datetime.datetime.now() + # Create object of today morning + expected_next_trigger = datetime.datetime( + now.year, now.month, now.day, h, m, s + ) + if expected_next_trigger > now: + seconds = (expected_next_trigger - now).total_seconds() + else: + expected_next_trigger += self._day_delta + seconds = (expected_next_trigger - now).total_seconds() + return seconds, expected_next_trigger + def register(self, *args, **kwargs): """Override register to be able trigger """ # Register server action as would be normally @@ -86,22 +118,12 @@ class CreateDailyReviewSessionServerAction(ServerAction): *args, **kwargs ) - # Create threading timer which will trigger creation of report - # at the 00:00:01 of next day - # - callback will trigger another timer which will have 1 day offset - now = datetime.datetime.now() - # Create object of today morning - today_morning = datetime.datetime( - now.year, now.month, now.day, 0, 0, 1 - ) - # Add a day delta (to calculate next day date) - next_day_morning = today_morning + self._day_delta - # Calculate first delta in seconds for first threading timer - first_delta = (next_day_morning - now).total_seconds() + seconds_delta, cycle_time = self._calculate_next_cycle_delta() + # Store cycle time which will be used to create next timer - self._last_cyle_time = next_day_morning + self._last_cyle_time = cycle_time # Create timer thread - self._cycle_timer = threading.Timer(first_delta, self._timer_callback) + self._cycle_timer = threading.Timer(seconds_delta, self._timer_callback) self._cycle_timer.start() self._check_review_session() @@ -111,13 +133,12 @@ class CreateDailyReviewSessionServerAction(ServerAction): self._cycle_timer is not None and self._last_cyle_time is not None ): - now = datetime.datetime.now() - while self._last_cyle_time < now: - self._last_cyle_time = self._last_cyle_time + self._day_delta + seconds_delta, cycle_time = self._calculate_next_cycle_delta() + self._last_cyle_time = cycle_time - delay = (self._last_cyle_time - now).total_seconds() - - self._cycle_timer = threading.Timer(delay, self._timer_callback) + self._cycle_timer = threading.Timer( + seconds_delta, self._timer_callback + ) self._cycle_timer.start() self._check_review_session() From e74f526def58d48fe5d77c44ea5b3615d595da4b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 11:42:48 +0200 Subject: [PATCH 342/785] modified labels --- .../projects_schema/schema_project_ftrack.json | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 4119184ca9..e008fd85ee 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -410,7 +410,10 @@ { "type": "boolean", "key": "cycle_enabled", - "label": "Create daily review session" + "label": "Run automatically every day" + }, + { + "type": "separator" }, { "type": "list-strict", @@ -419,17 +422,19 @@ "tooltip": "This may take affect on next day", "object_types": [ { - "label": "HMS", + "label": "H:", "type": "number", "minimum": 0, "maximum": 23, "decimal": 0 }, { + "label": "M:", "type": "number", "minimum": 0, "maximum": 59, "decimal": 0 }, { + "label": "S:", "type": "number", "minimum": 0, "maximum": 59, @@ -437,6 
+442,10 @@ } ] }, + { + "type": "label", + "label": "This can't be overriden per project and any change will take effect on the next day or on restart of event server." + }, { "type": "separator" }, From 7f9cdaaa0e90b8c00dcdc91a523d09e1f8d32459 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 11:43:19 +0200 Subject: [PATCH 343/785] trayp: updating settings --- .../defaults/project_settings/traypublisher.json | 10 +++++----- .../projects_schema/schema_project_traypublisher.json | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 93f6420c21..82c82c79e9 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -245,17 +245,17 @@ "parents_path": "{project}/{folder}/{sequence}", "parents": [ { - "type": "project", - "name": "projekt", - "value": "{projekt[name]}" + "type": "Project", + "name": "project", + "value": "{project[name]}" }, { - "type": "folder", + "type": "Folder", "name": "folder", "value": "shots" }, { - "type": "sequence", + "type": "Sequence", "name": "sequence", "value": "{_sequence_}" } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 3af3839c6f..909ee02b04 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -173,10 +173,10 @@ "key": "type", "label": "Parent type", "enum_items": [ - {"project": "Project"}, - {"folder": "Folder"}, - {"episode": "Episode"}, - {"sequence": "Sequence"} + {"Project": "Project"}, + {"Folder": "Folder"}, + {"Episode": "Episode"}, + {"Sequence": "Sequence"} ] }, { From a8e4fdba5fa11f1c0a66e90f9e6ed9429212e9d9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 11:43:39 +0200 Subject: [PATCH 344/785] trayp: editorial with hierarchy and parents --- openpype/hosts/traypublisher/api/editorial.py | 69 ++++++++++++++----- .../plugins/create/create_editorial.py | 21 ++++-- 2 files changed, 70 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index d6cc99f87c..713f1b5c6c 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -2,7 +2,7 @@ import re from copy import deepcopy from openpype.client import get_asset_by_id - +from openpype.pipeline.create import CreatorError class ShotMetadataSover: """Collecting hierarchy context from `parents` and `hierarchy` data @@ -21,16 +21,27 @@ class ShotMetadataSover: shot_hierarchy = None shot_add_tasks = None - def __init__(self, creator_settings): + def __init__(self, creator_settings, logger): self.clip_name_tokenizer = creator_settings["clip_name_tokenizer"] self.shot_rename = creator_settings["shot_rename"] self.shot_hierarchy = creator_settings["shot_hierarchy"] self.shot_add_tasks = creator_settings["shot_add_tasks"] + self.log = logger + def _rename_template(self, data): - # format to new shot name - return self.shot_rename[ - "shot_rename_template"].format(**data) + shot_rename_template = self.shot_rename[ + "shot_rename_template"] + try: + # format to new shot name + return shot_rename_template.format(**data) + except 
KeyError as _E: + raise CreatorError(( + "Make sure all keys are correct in settings: \n\n" + f"From template string {shot_rename_template} > " + f"`{_E}` has no equivalent in \n" + f"{list(data.keys())} input formating keys!" + )) def _generate_tokens(self, clip_name, source_data): output_data = deepcopy(source_data["anatomy_data"]) @@ -47,7 +58,13 @@ class ShotMetadataSover: p = re.compile(pattern) match = p.findall(search_text) if not match: - continue + raise CreatorError(( + "Make sure regex expression is correct: \n\n" + f"From settings '{token_key}' key " + f"with '{pattern}' expression, \n" + f"is not able to find anything in '{search_text}'!" + )) + # QUESTION:how to refactory `match[-1]` to some better way? output_data[token_key] = match[-1] @@ -60,10 +77,17 @@ class ShotMetadataSover: hierarchy_parents = shot_hierarchy["parents"] # fill parent keys data template from anatomy data - _parent_tokens_formating_data = { - parent_token["name"]: parent_token["value"].format(**data) - for parent_token in hierarchy_parents - } + try: + _parent_tokens_formating_data = { + parent_token["name"]: parent_token["value"].format(**data) + for parent_token in hierarchy_parents + } + except KeyError as _E: + raise CreatorError(( + "Make sure all keys are correct in settings: \n" + f"`{_E}` has no equivalent in \n{list(data.keys())}" + )) + _parent_tokens_type = { parent_token["name"]: parent_token["type"] for parent_token in hierarchy_parents @@ -72,8 +96,17 @@ class ShotMetadataSover: shot_hierarchy["parents_path"].split("/") ): # format parent token with value which is formated - parent_name = _parent.format( - **_parent_tokens_formating_data) + try: + parent_name = _parent.format( + **_parent_tokens_formating_data) + except KeyError as _E: + raise CreatorError(( + "Make sure all keys are correct in settings: \n\n" + f"From template string {shot_hierarchy['parents_path']} > " + f"`{_E}` has no equivalent in \n" + f"{list(_parent_tokens_formating_data.keys())} parents" + )) + parent_token_name = ( self.NO_DECOR_PATERN.findall(_parent).pop()) @@ -95,7 +128,7 @@ class ShotMetadataSover: # in case first parent is project then start parents from start if ( _index == 0 - and parent_token_type == "project" + and parent_token_type == "Project" ): self.log.debug("rebuilding parents from scratch") project_parent = parents[0] @@ -113,7 +146,10 @@ class ShotMetadataSover: def _create_hierarchy_path(self, parents): return "/".join( - [p for p in parents if p["entity_type"] != "project"] + [ + p["entity_name"] for p in parents + if p["entity_type"] != "Project" + ] ) if parents else "" def _get_parents_from_selected_asset( @@ -187,11 +223,12 @@ class ShotMetadataSover: parents = self._get_parents_from_selected_asset(asset_doc, project_doc) if self.shot_rename["enabled"]: - shot_name = self._rename_template(clip_name, formating_data) + shot_name = self._rename_template(formating_data) self.log.info(f"Renamed shot name: {shot_name}") if self.shot_hierarchy["enabled"]: - parents = self._create_parents_from_settings(formating_data) + parents = self._create_parents_from_settings( + parents, formating_data) if self.shot_add_tasks: tasks = self._generate_tasks_from_settings( diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 7672bb6222..6bcc692240 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -178,7 +178,8 @@ or 
updating already created. Publishing will create OTIO file. ) # get this creator settings by identifier self._creator_settings = editorial_creators.get(self.identifier) - self._shot_metadata_solver = ShotMetadataSover(self._creator_settings) + self._shot_metadata_solver = ShotMetadataSover( + self._creator_settings, self.log) # try to set main attributes from settings if self._creator_settings.get("default_variants"): @@ -407,13 +408,22 @@ or updating already created. Publishing will create OTIO file. # basic unique asset name clip_name = os.path.splitext(clip.name)[0].lower() + project_doc = get_project(self.project_name) shot_name, shot_metadata = self._shot_metadata_solver.generate_data( clip_name, { - "anatomy_data": anatomy_data, - "selected_asset_doc": get_asset_by_name(parent_asset_name), - "project_doc": get_project(self.project_name) + "anatomy_data": { + "project": { + "name": self.project_name, + "code": project_doc["data"]["code"] + }, + "parent": parent_asset_name, + "app": self.host_name + }, + "selected_asset_doc": get_asset_by_name( + self.project_name, parent_asset_name), + "project_doc": project_doc } ) @@ -429,6 +439,7 @@ or updating already created. Publishing will create OTIO file. # create creator attributes creator_attributes = { "asset_name": shot_name, + "Parent hierarchy path": shot_metadata["hierarchy"], "workfile_start_frame": workfile_start_frame, "fps": fps, "handle_start": int(handle_start), @@ -451,6 +462,8 @@ or updating already created. Publishing will create OTIO file. # creator_attributes "creator_attributes": creator_attributes } + # add hierarchy shot metadata + base_instance_data.update(shot_metadata) return base_instance_data From af06e1a8511979ccc1398f1fda9887c98289f0ee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 12:02:19 +0200 Subject: [PATCH 345/785] fix too long line --- .../event_handlers_server/action_create_review_session.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py b/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py index 68d498a83a..21382007a0 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py +++ b/openpype/modules/ftrack/event_handlers_server/action_create_review_session.py @@ -123,7 +123,9 @@ class CreateDailyReviewSessionServerAction(ServerAction): # Store cycle time which will be used to create next timer self._last_cyle_time = cycle_time # Create timer thread - self._cycle_timer = threading.Timer(seconds_delta, self._timer_callback) + self._cycle_timer = threading.Timer( + seconds_delta, self._timer_callback + ) self._cycle_timer.start() self._check_review_session() From 9f2bbbbe386d874cee06fb8b684d377cfb5cd339 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Jul 2022 13:12:09 +0200 Subject: [PATCH 346/785] OP-3589 - added thumbnail extract to traypublisher Added check if thumbnail representation is not present, created by different plugin by any chance. ExtractThumbnailSP should be removed when SP is removed, no need to copy and have 2 plugins. 
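The guard described in this commit boils down to a scan of the representations already collected on the instance; a minimal equivalent of the helper added below could look like:

def has_thumbnail(instance):
    # True when any existing representation is already named "thumbnail"
    return any(
        repre.get("name") == "thumbnail"
        for repre in instance.data.get("representations", [])
    )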
--- openpype/plugins/publish/extract_thumbnail.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 7a438ca701..e6df5b3ee0 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -22,7 +22,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "imagesequence", "render", "render2d", "prerender", "source", "plate", "take" ] - hosts = ["shell", "fusion", "resolve"] + hosts = ["shell", "fusion", "resolve", "traypublisher"] enabled = False # presetable attribute @@ -46,6 +46,10 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): self.log.info("Skipping - no review set on instance.") return + if self._has_thumbnail_already(instance): + self.log.info("Thumbnail representation already present.") + return + filtered_repres = self._get_filtered_repres(instance) for repre in filtered_repres: repre_files = repre["files"] @@ -102,6 +106,14 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # There is no need to create more then one thumbnail break + def _has_thumbnail_already(self, instance): + for repre in instance.data.get("representations", []): + self.log.info("repre {}".format(repre)) + if repre["name"] == "thumbnail": + return True + + return False + def _get_filtered_repres(self, instance): filtered_repres = [] src_repres = instance.data.get("representations") or [] From a8c219211d5f2dddaf6b4dd31fd59d1137bbd0c1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Jul 2022 13:25:42 +0200 Subject: [PATCH 347/785] OP-3589 - copied ValidateFrameRange from SP SP will be removed in the future, copied this to TrayPublisher to keep functionality. --- .../publish/help/validate_frame_ranges.xml | 15 ++++ .../plugins/publish/validate_frame_ranges.py | 72 +++++++++++++++++++ 2 files changed, 87 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/publish/help/validate_frame_ranges.xml create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py diff --git a/openpype/hosts/traypublisher/plugins/publish/help/validate_frame_ranges.xml b/openpype/hosts/traypublisher/plugins/publish/help/validate_frame_ranges.xml new file mode 100644 index 0000000000..933df1c7c5 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/help/validate_frame_ranges.xml @@ -0,0 +1,15 @@ + + + +Invalid frame range + +## Invalid frame range + +Expected duration or '{duration}' frames set in database, workfile contains only '{found}' frames. + +### How to repair? + +Modify configuration in the database or tweak frame range in the workfile. 
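The validator added next compares the frame count implied by the asset settings against the number of published files; a worked example with made-up values:

# Hypothetical asset settings
frame_start, frame_end = 1001, 1100
handle_start, handle_end = 10, 10

expected = (frame_end - frame_start + 1) + handle_start + handle_end
# expected == 120, so a 100-file EXR sequence would fail validation and the
# XML message above would be formatted with duration=120 and found=100.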
+ + + \ No newline at end of file diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py new file mode 100644 index 0000000000..89289fc6d4 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -0,0 +1,72 @@ +import re + +import pyblish.api + +import openpype.api +from openpype import lib +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateFrameRange(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): + """Validating frame range of rendered files against state in DB.""" + + label = "Validate Frame Range" + hosts = ["traypublisher"] + families = ["render"] + order = openpype.api.ValidateContentsOrder + + optional = True + # published data might be sequence (.mov, .mp4) in that counting files + # doesnt make sense + check_extensions = ["exr", "dpx", "jpg", "jpeg", "png", "tiff", "tga", + "gif", "svg"] + skip_timelines_check = [] # skip for specific task names (regex) + + def process(self, instance): + # Skip the instance if is not active by data on the instance + if not self.is_active(instance.data): + return + + if any(re.search(pattern, instance.data["task"]) + for pattern in self.skip_timelines_check): + self.log.info("Skipping for {} task".format(instance.data["task"])) + + asset_data = lib.get_asset(instance.data["asset"])["data"] + frame_start = asset_data["frameStart"] + frame_end = asset_data["frameEnd"] + handle_start = asset_data["handleStart"] + handle_end = asset_data["handleEnd"] + duration = (frame_end - frame_start + 1) + handle_start + handle_end + + repre = instance.data.get("representations", [None]) + if not repre: + self.log.info("No representations, skipping.") + return + + ext = repre[0]['ext'].replace(".", '') + + if not ext or ext.lower() not in self.check_extensions: + self.log.warning("Cannot check for extension {}".format(ext)) + return + + files = instance.data.get("representations", [None])[0]["files"] + if isinstance(files, str): + files = [files] + frames = len(files) + + msg = "Frame duration from DB:'{}' ". format(int(duration)) +\ + " doesn't match number of files:'{}'".format(frames) +\ + " Please change frame range for Asset or limit no. of files" + + formatting_data = {"duration": duration, + "found": frames} + if frames != duration: + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) + + self.log.debug("Valid ranges expected '{}' - found '{}'". 
+ format(int(duration), frames)) From 870cc2bc0607b36328f03c2b75be4c5b12e39d6e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 13:34:26 +0200 Subject: [PATCH 348/785] fix hash of oiio centos file --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4bdaaab4ed..078503a284 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -135,7 +135,7 @@ hash = "b9950f5d2fa3720b52b8be55bacf5f56d33f9e029d38ee86534995f3d8d253d2" [openpype.thirdparty.oiio.linux] url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.20-linux-centos7.tgz" -hash = "be1abf8a50e9da5913298447421af0a17829d83ed6252ae1d40da7fa36a78787" +hash = "3894dec7e4e521463891a869586850e8605f5fd604858b674c87323bf33e273d" [openpype.thirdparty.oiio.darwin] url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" From fe68a07a90d057526b5c65854528d67ce637c629 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 14:16:43 +0200 Subject: [PATCH 349/785] trayp: update editorial creator --- .../hosts/traypublisher/plugins/create/create_editorial.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 6bcc692240..ffff5de70a 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -255,7 +255,8 @@ or updating already created. Publishing will create OTIO file. # Pass precreate data to creator attributes data.update({ - "sequence_file_path": file_path + "sequenceFilePath": file_path, + "otioTimeline": otio.adapters.write_to_string(otio_timeline) }) self._create_instance(self.family, subset_name, data) @@ -324,6 +325,7 @@ or updating already created. Publishing will create OTIO file. 
_fpreset, future_instance_data ) + future_instance_data["label"] = label # add file extension filter only if it is not shot family if family == "shot": From b22b28edbc3a6af4e9e68f801a5a5c4a5c13be27 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 14:17:02 +0200 Subject: [PATCH 350/785] trayp: publishing editorial --- .../publish/collect_editorial_instances.py | 45 +++++++++---------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py index 6521c97774..c088709a61 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py @@ -1,15 +1,17 @@ import os from pprint import pformat import pyblish.api +import opentimelineio as otio class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): """Collect data for instances created by settings creators.""" label = "Collect Editorial Instances" - order = pyblish.api.CollectorOrder - 0.49 + order = pyblish.api.CollectorOrder hosts = ["traypublisher"] + families = ["editorial"] def process(self, instance): @@ -18,34 +20,27 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if "representations" not in instance.data: instance.data["representations"] = [] - repres = instance.data["representations"] - self.log.debug( - pformat(dict(instance.data)) - ) - creator_attributes = instance.data["creator_attributes"] - filepath_item = creator_attributes["filepath"] - self.log.info(filepath_item) - filepaths = [ - os.path.join(filepath_item["directory"], filename) - for filename in filepath_item["filenames"] - ] - instance.data["sourceFilepaths"] = filepaths - instance.data["stagingDir"] = filepath_item["directory"] + fpath = instance.data["sequenceFilePath"] + otio_timeline_string = instance.data.pop("otioTimeline") + otio_timeline = otio.adapters.read_from_string( + otio_timeline_string) - filenames = filepath_item["filenames"] - _, ext = os.path.splitext(filenames[0]) - ext = ext[1:] - if len(filenames) == 1: - filenames = filenames[0] + instance.context.data["otioTimeline"] = otio_timeline - repres.append({ - "ext": ext, - "name": ext, - "stagingDir": filepath_item["directory"], - "files": filenames + self.log.info(fpath) + + instance.data["stagingDir"] = os.path.dirname(fpath) + + _, ext = os.path.splitext(fpath) + + instance.data["representations"].append({ + "ext": ext[1:], + "name": ext[1:], + "stagingDir": instance.data["stagingDir"], + "files": os.path.basename(fpath) }) self.log.debug("Created Simple Settings instance {}".format( - instance.data + pformat(instance.data) )) From fb586feaf3dec0eeabe370f512426c03df8d7289 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 14:17:31 +0200 Subject: [PATCH 351/785] general: label could be set from instance data --- openpype/plugins/publish/collect_from_create_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index d2be633cbe..e070cc411d 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -44,7 +44,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): "subset": subset, "asset": in_data["asset"], "task": in_data["task"], - "label": subset, + "label": 
in_data.get("label") or subset, "name": subset, "family": in_data["family"], "families": instance_families, From 0111f4ae0f70b48e1098f8e28ed50ac969f1a8f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 14:38:59 +0200 Subject: [PATCH 352/785] added option to launch openpype with interactive console --- openpype/cli.py | 16 +++++++++++++++- openpype/pype_commands.py | 2 +- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 2aa4a46929..d6970f2509 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -2,7 +2,7 @@ """Package for handling pype command line arguments.""" import os import sys - +import code import click # import sys @@ -424,3 +424,17 @@ def pack_project(project, dirpath): def unpack_project(zipfile, root): """Create a package of project with all files and database dump.""" PypeCommands().unpack_project(zipfile, root) + + +@main.command() +def interactive(): + """Interative (Python like) console. + + Helpfull command not only for development to directly work with python + interpreter. + + Warning: + Executable 'openpype_gui' on windows won't work. + """ + + code.interact() diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 90c582a319..124eacbe39 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -7,7 +7,7 @@ import time from openpype.lib import PypeLogger from openpype.api import get_app_environments_for_context -from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info +from openpype.lib.plugin_tools import get_batch_asset_task_info from openpype.lib.remote_publish import ( get_webpublish_conn, start_webpublish_log, From b60acdd6d77d488464ffdfad871b076926d94896 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 14:45:32 +0200 Subject: [PATCH 353/785] added banner to interpreter --- openpype/cli.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index d6970f2509..9a2dfaa141 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -437,4 +437,9 @@ def interactive(): Executable 'openpype_gui' on windows won't work. """ - code.interact() + from openpype.version import __version__ + + banner = "OpenPype {}\nPython {} on {}".format( + __version__, sys.version, sys.platform + ) + code.interact(banner) From a395a85b67065e34c445e2b522afedb8d0f6cf42 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Jul 2022 15:07:11 +0200 Subject: [PATCH 354/785] OP-3446 - explicitly adding review Creator now should handle adding review on instance and on applicable representation tags. 
--- .../plugins/create/create_mov_batch.py | 15 +++++++++++++-- .../plugins/publish/collect_mov_batch.py | 17 +++++++++++++++-- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index 20d3ecbd7c..67f8848e05 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -4,7 +4,7 @@ import re from openpype.client import get_assets, get_asset_by_name from openpype.hosts.traypublisher.api import pipeline -from openpype.lib import FileDef, get_subset_name_with_asset_doc +from openpype.lib import FileDef, BoolDef, get_subset_name_with_asset_doc from openpype.pipeline import ( CreatedInstance, CreatorError @@ -151,7 +151,13 @@ class BatchMovCreator(TrayPublishCreator): return task_name def get_instance_attr_defs(self): - return [] + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] def get_pre_create_attr_defs(self): # Use same attributes as for instance attributes @@ -162,6 +168,11 @@ class BatchMovCreator(TrayPublishCreator): single_item=False, extensions=self.extensions, label="Filepath" + ), + BoolDef( + "add_review_family", + default=True, + label="Review" ) ] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py index c81d1f77a5..e4011d0003 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -2,12 +2,17 @@ import os import pyblish.api from openpype.pipeline import OpenPypePyblishPluginMixin +from openpype.lib import BoolDef class CollectMovBatch( pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin ): - """Collect file url for batch mov and create representation.""" + """Collect file url for batch mov and create representation. + + Adds review on instance and to repre.tags based on value of toggle button + on creator. 
+ """ label = "Collect Mov Batch Files" order = pyblish.api.CollectorOrder @@ -18,7 +23,9 @@ class CollectMovBatch( if not instance.data.get("creator_identifier") == "render_mov_batch": return - file_url = instance.data["creator_attributes"]["filepath"] + creator_attributes = instance.data["creator_attributes"] + + file_url = creator_attributes["filepath"] file_name = os.path.basename(file_url) _, ext = os.path.splitext(file_name) @@ -29,6 +36,12 @@ class CollectMovBatch( "stagingDir": os.path.dirname(file_url) } + if creator_attributes["add_review_family"]: + if not repre.get("tags"): + repre["tags"] = [] + repre["tags"].append("review") + instance.data["families"].append("review") + instance.data["representations"].append(repre) instance.data["source"] = file_url From a1b93b1d4812cd44dc20f8f69e5f6953d91e7d52 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Jul 2022 15:08:16 +0200 Subject: [PATCH 355/785] OP-3446 - Hound --- .../hosts/traypublisher/plugins/publish/collect_mov_batch.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py index e4011d0003..99065d2408 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -2,7 +2,6 @@ import os import pyblish.api from openpype.pipeline import OpenPypePyblishPluginMixin -from openpype.lib import BoolDef class CollectMovBatch( From b9be23496924fe4ac99e764b11a82db348ef0b3e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 15:19:07 +0200 Subject: [PATCH 356/785] removed default host used on deregister of host --- openpype/pipeline/context_tools.py | 24 +----------------------- 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index e719e46514..fd4dc6e3fd 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -240,29 +240,7 @@ def registered_host(): def deregister_host(): - _registered_host["_"] = default_host() - - -def default_host(): - """A default host, in place of anything better - - This may be considered as reference for the - interface a host must implement. It also ensures - that the system runs, even when nothing is there - to support it. 
- - """ - - host = types.ModuleType("defaultHost") - - def ls(): - return list() - - host.__dict__.update({ - "ls": ls - }) - - return host + _registered_host["_"] = None def debug_host(): From 636e46cfd673f13bba211024bf1b56180f17abad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 15:29:18 +0200 Subject: [PATCH 357/785] implemented functions to query project and asset documents based on current context --- openpype/pipeline/context_tools.py | 52 ++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index fd4dc6e3fd..80ad939ccd 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -10,6 +10,11 @@ import pyblish.api from pyblish.lib import MessageHandler import openpype +from openpype.client import ( + get_project, + get_asset_by_id, + get_asset_by_name, +) from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings from openpype.lib import filter_pyblish_plugins @@ -282,3 +287,50 @@ def debug_host(): }) return host + + +def get_current_project(fields=None): + """Helper function to get project document based on global Session. + + This function should be called only in process where host is installed. + + Returns: + dict: Project document. + None: Project is not set. + """ + + project_name = legacy_io.active_project() + return get_project(project_name, fields=fields) + + +def get_current_project_asset(asset_name=None, asset_id=None, fields=None): + """Helper function to get asset document based on global Session. + + This function should be called only in process where host is installed. + + Asset is found out based on passed asset name or id (not both). Asset name + is not used for filtering if asset id is passed. When both asset name and + id are missing then asset name from current process is used. + + Args: + asset_name (str): Name of asset used for filter. + asset_id (Union[str, ObjectId]): Asset document id. If entered then + is used as only filter. + fields (Union[List[str], None]): Limit returned data of asset documents + to specific keys. + + Returns: + dict: Asset document. + None: Asset is not set or not exist. + """ + + project_name = legacy_io.active_project() + if asset_id: + return get_asset_by_id(project_name, asset_id, fields=fields) + + if not asset_name: + asset_name = legacy_io.Session.get("AVALON_ASSET") + # Skip if is not set even on context + if not asset_name: + return None + return get_asset_by_name(project_name, asset_name, fields=fields) From 35bd841939a78a5d963bb2972c4b14b0bace13b4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 15:31:51 +0200 Subject: [PATCH 358/785] marked 'get_asset' as deprecated --- openpype/lib/avalon_context.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 76ed6cbbd3..7ed22d6de6 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -236,7 +236,7 @@ def any_outdated(): return False -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_current_project_asset") def get_asset(asset_name=None): """ Returning asset document from database by its name. 
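A short usage sketch of the replacement helper this deprecation points to; later commits in the series use the fields argument to fetch only the fps value:

from openpype.pipeline.context_tools import get_current_project_asset

# Full asset document for the asset set in the current session
# (may return None when no asset is set).
asset_doc = get_current_project_asset()

# Query limited to the data that is actually needed
fps = get_current_project_asset(fields=["data.fps"])["data"]["fps"]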
@@ -249,15 +249,9 @@ def get_asset(asset_name=None): (MongoDB document) """ - project_name = legacy_io.active_project() - if not asset_name: - asset_name = legacy_io.Session["AVALON_ASSET"] + from openpype.pipeline.context_tools import get_current_project_asset - asset_document = get_asset_by_name(project_name, asset_name) - if not asset_document: - raise TypeError("Entity \"{}\" was not found in DB".format(asset_name)) - - return asset_document + return get_current_project_asset(asset_name=asset_name) def get_system_general_anatomy_data(system_settings=None): From de0c0effe60e38f07bc47f577f8e5fb67f61814c Mon Sep 17 00:00:00 2001 From: jrsndlr Date: Fri, 15 Jul 2022 15:39:54 +0200 Subject: [PATCH 359/785] reencode with concat, fix audio --- .../plugins/publish/extract_review_slate.py | 51 ++++++++++--------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 28685c2e90..737b7db295 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -285,36 +285,32 @@ class ExtractReviewSlate(openpype.api.Extractor): audio_channels, audio_sample_rate, audio_channel_layout, + input_frame_rate ) # replace slate with silent slate for concat slate_v_path = slate_silent_path - # create ffmpeg concat text file path - conc_text_file = input_file.replace(ext, "") + "_concat" + ".txt" - conc_text_path = os.path.join( - os.path.normpath(stagingdir), conc_text_file) - _remove_at_end.append(conc_text_path) - self.log.debug("__ conc_text_path: {}".format(conc_text_path)) - - new_line = "\n" - with open(conc_text_path, "w") as conc_text_f: - conc_text_f.writelines([ - "file {}".format( - slate_v_path.replace("\\", "/")), - new_line, - "file {}".format(input_path.replace("\\", "/")) - ]) - - # concat slate and videos together + # concat slate and videos together with concat filter + # this will reencode the output + if input_audio: + fmap = [ + "[0:v] [0:a] [1:v] [1:a] concat=n=2:v=1:a=1 [v] [a]", + "-map", '[v]', + "-map", '[a]' + ] + else: + fmap = [ + "[0:v] [1:v] concat=n=2:v=1:a=0 [v]", + "-map", '[v]' + ] concat_args = [ ffmpeg_path, - "-y", - "-f", "concat", - "-safe", "0", - "-i", conc_text_path, - "-c", "copy", + "-i", slate_v_path, + "-i", input_path, + "-filter_complex", ] + concat_args.extend(fmap) if offset_timecode: concat_args.extend(["-timecode", offset_timecode]) # NOTE: Added because of OP Atom demuxers @@ -328,6 +324,10 @@ class ExtractReviewSlate(openpype.api.Extractor): copy_args = ( "-metadata", "-metadata:s:v:0", + "-codec:v", + "-pixfmt", + "-b:v", + "-b:a", ) args = source_ffmpeg_cmd.split(" ") for indx, arg in enumerate(args): @@ -335,12 +335,14 @@ class ExtractReviewSlate(openpype.api.Extractor): concat_args.append(arg) # assumes arg has one parameter concat_args.append(args[indx + 1]) + concat_args.append("-y") # add final output path concat_args.append(output_path) # ffmpeg concat subprocess self.log.debug( - "Executing concat: {}".format(" ".join(concat_args)) + "Executing concat filter: {}".format + (" ".join(concat_args)) ) openpype.api.run_subprocess( concat_args, logger=self.log @@ -488,9 +490,10 @@ class ExtractReviewSlate(openpype.api.Extractor): audio_channels, audio_sample_rate, audio_channel_layout, + input_frame_rate ): # Get duration of one frame in micro seconds - items = audio_sample_rate.split("/") + items = input_frame_rate.split("/") if len(items) == 1: one_frame_duration = 1.0 / 
float(items[0]) elif len(items) == 2: From ad8a7c86e4b655014e6dc776c813e9966cb9e1f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 15:57:01 +0200 Subject: [PATCH 360/785] use 'get_current_project_asset' in hosts --- openpype/hosts/harmony/api/pipeline.py | 5 ++++- openpype/hosts/hiero/api/plugin.py | 3 ++- openpype/hosts/houdini/api/lib.py | 4 ++-- openpype/hosts/maya/api/lib.py | 14 ++++++++------ .../hosts/maya/plugins/create/create_render.py | 6 +++--- .../maya/plugins/publish/validate_maya_units.py | 10 +++++++--- openpype/hosts/nuke/api/lib.py | 4 ++-- .../hosts/nuke/plugins/publish/validate_script.py | 10 +++++----- openpype/hosts/resolve/api/plugin.py | 4 ++-- .../plugins/publish/collect_editorial.py | 3 ++- .../plugins/publish/collect_editorial_instances.py | 8 ++++++-- .../plugins/publish/validate_frame_ranges.py | 5 +++-- .../hosts/unreal/plugins/load/load_animation.py | 9 ++++++--- openpype/hosts/unreal/plugins/load/load_layout.py | 5 +++-- 14 files changed, 55 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 86b5753f7e..94ca134205 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -15,6 +15,7 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.context_tools import get_current_project_asset import openpype.hosts.harmony import openpype.hosts.harmony.api as harmony @@ -50,7 +51,9 @@ def get_asset_settings(): dict: Scene data. """ - asset_data = lib.get_asset()["data"] + + asset_doc = get_current_project_asset() + asset_data = asset_doc["data"] fps = asset_data.get("fps") frame_start = asset_data.get("frameStart") frame_end = asset_data.get("frameEnd") diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index add416d04e..28a9dfb492 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -10,6 +10,7 @@ import qargparse import openpype.api as openpype from openpype.pipeline import LoaderPlugin, LegacyCreator +from openpype.pipeline.context_tools import get_current_project_asset from . 
import lib log = openpype.Logger().get_logger(__name__) @@ -484,7 +485,7 @@ class ClipLoader: """ asset_name = self.context["representation"]["context"]["asset"] - asset_doc = openpype.get_asset(asset_name) + asset_doc = get_current_project_asset(asset_name) log.debug("__ asset_doc: {}".format(pformat(asset_doc))) self.data["assetData"] = asset_doc["data"] diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index dd8a5ba473..c8a7f92bb9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -5,8 +5,8 @@ from contextlib import contextmanager import six from openpype.client import get_asset_by_name -from openpype.api import get_asset from openpype.pipeline import legacy_io +from openpype.pipeline.context_tools import get_current_project_asset import hou @@ -16,7 +16,7 @@ log = logging.getLogger(__name__) def get_asset_fps(): """Return current asset fps.""" - return get_asset()["data"].get("fps") + return get_current_project_asset()["data"].get("fps") def set_id(node, unique_id, overwrite=False): diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index e4221978c0..58e160cb2f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -23,7 +23,6 @@ from openpype.client import ( get_last_versions, get_representation_by_name ) -from openpype import lib from openpype.api import get_anatomy_settings from openpype.pipeline import ( legacy_io, @@ -33,6 +32,7 @@ from openpype.pipeline import ( load_container, registered_host, ) +from openpype.pipeline.context_tools import get_current_project_asset from .commands import reset_frame_range @@ -2174,7 +2174,7 @@ def reset_scene_resolution(): project_name = legacy_io.active_project() project_doc = get_project(project_name) project_data = project_doc["data"] - asset_data = lib.get_asset()["data"] + asset_data = get_current_project_asset()["data"] # Set project resolution width_key = "resolutionWidth" @@ -2208,7 +2208,8 @@ def set_context_settings(): project_name = legacy_io.active_project() project_doc = get_project(project_name) project_data = project_doc["data"] - asset_data = lib.get_asset()["data"] + asset_doc = get_current_project_asset(fields=["data.fps"]) + asset_data = asset_doc.get("data", {}) # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) @@ -2233,7 +2234,7 @@ def validate_fps(): """ - fps = lib.get_asset()["data"]["fps"] + fps = get_current_project_asset(fields=["data.fps"])["data"]["fps"] # TODO(antirotor): This is hack as for framerates having multiple # decimal places. 
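The framerate concern raised in this comment (continued just below) can be shown with a quick example; ceiling to two decimal places is what reconciles the two values:

import math

maya_fps = 23.976023976023978    # high-precision value as reported by Maya
asset_fps = 23.98                # value stored after ceiling to 2 decimals

maya_fps == asset_fps                          # False when compared directly
math.ceil(maya_fps * 100) / 100 == asset_fps   # True after the same rounding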
FTrack is ceiling decimal values on # fps to two decimal places but Maya 2019+ is reporting those fps @@ -3051,8 +3052,9 @@ def update_content_on_context_change(): This will update scene content to match new asset on context change """ scene_sets = cmds.listSets(allSets=True) - new_asset = legacy_io.Session["AVALON_ASSET"] - new_data = lib.get_asset()["data"] + asset_doc = get_current_project_asset() + new_asset = asset_doc["name"] + new_data = asset_doc["data"] for s in scene_sets: try: if cmds.getAttr("{}.id".format(s)) == "pyblish.avalon.instance": diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 93ee6679e5..de07a0b23d 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -15,13 +15,13 @@ from openpype.hosts.maya.api import ( from openpype.lib import requests_get from openpype.api import ( get_system_settings, - get_project_settings, - get_asset) + get_project_settings) from openpype.modules import ModulesManager from openpype.pipeline import ( CreatorError, legacy_io, ) +from openpype.pipeline.context_tools import get_current_project_asset class CreateRender(plugin.Creator): @@ -413,7 +413,7 @@ class CreateRender(plugin.Creator): prefix, type="string") - asset = get_asset() + asset = get_current_project_asset() if renderer == "arnold": # set format to exr diff --git a/openpype/hosts/maya/plugins/publish/validate_maya_units.py b/openpype/hosts/maya/plugins/publish/validate_maya_units.py index d5a8c350d5..5f67adec76 100644 --- a/openpype/hosts/maya/plugins/publish/validate_maya_units.py +++ b/openpype/hosts/maya/plugins/publish/validate_maya_units.py @@ -2,8 +2,8 @@ import maya.cmds as cmds import pyblish.api import openpype.api -from openpype import lib import openpype.hosts.maya.api.lib as mayalib +from openpype.pipeline.context_tools import get_current_project_asset from math import ceil @@ -41,7 +41,9 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): # now flooring the value? fps = float_round(context.data.get('fps'), 2, ceil) - asset_fps = lib.get_asset()["data"]["fps"] + # TODO repace query with using 'context.data["assetEntity"]' + asset_doc = get_current_project_asset() + asset_fps = asset_doc["data"]["fps"] self.log.info('Units (linear): {0}'.format(linearunits)) self.log.info('Units (angular): {0}'.format(angularunits)) @@ -91,5 +93,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - asset_fps = lib.get_asset()["data"]["fps"] + # TODO repace query with using 'context.data["assetEntity"]' + asset_doc = get_current_project_asset() + asset_fps = asset_doc["data"]["fps"] mayalib.set_scene_fps(asset_fps) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 0929415c00..7be7c1169c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -24,7 +24,6 @@ from openpype.api import ( BuildWorkfile, get_version_from_path, get_workdir_data, - get_asset, get_current_project_settings, ) from openpype.tools.utils import host_tools @@ -40,6 +39,7 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.context_tools import get_current_project_asset from . 
import gizmo_menu @@ -1766,7 +1766,7 @@ class WorkfileSettings(object): kwargs.get("asset_name") or legacy_io.Session["AVALON_ASSET"] ) - self._asset_entity = get_asset(self._asset) + self._asset_entity = get_current_project_asset(self._asset) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py index 9bda0da85e..b8d7494b9d 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script.py @@ -1,7 +1,6 @@ import pyblish.api -from openpype.client import get_project, get_asset_by_id -from openpype import lib +from openpype.client import get_project, get_asset_by_id, get_asset_by_name from openpype.pipeline import legacy_io @@ -17,10 +16,11 @@ class ValidateScript(pyblish.api.InstancePlugin): def process(self, instance): ctx_data = instance.context.data - asset_name = ctx_data["asset"] - asset = lib.get_asset(asset_name) - asset_data = asset["data"] project_name = legacy_io.active_project() + asset_name = ctx_data["asset"] + # TODO repace query with using 'instance.data["assetEntity"]' + asset = get_asset_by_name(project_name, asset_name) + asset_data = asset["data"] # These attributes will be checked attributes = [ diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index 49b478fb3b..b03125d502 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -4,11 +4,11 @@ import uuid import qargparse from Qt import QtWidgets, QtCore -import openpype.api as pype from openpype.pipeline import ( LegacyCreator, LoaderPlugin, ) +from openpype.pipeline.context_tools import get_current_project_asset from openpype.hosts import resolve from . import lib @@ -375,7 +375,7 @@ class ClipLoader: """ asset_name = self.context["representation"]["context"]["asset"] - self.data["assetData"] = pype.get_asset(asset_name)["data"] + self.data["assetData"] = get_current_project_asset(asset_name)["data"] def load(self): # create project bin for the media to be imported into diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py index 0a1d29ccdc..8633d4bf9d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py @@ -19,6 +19,7 @@ import os import opentimelineio as otio import pyblish.api from openpype import lib as plib +from openpype.pipeline.context_tools import get_current_project_asset class OTIO_View(pyblish.api.Action): @@ -116,7 +117,7 @@ class CollectEditorial(pyblish.api.InstancePlugin): if extension == ".edl": # EDL has no frame rate embedded so needs explicit # frame rate else 24 is asssumed. 
- kwargs["rate"] = plib.get_asset()["data"]["fps"] + kwargs["rate"] = get_current_project_asset()["data"]["fps"] instance.data["otio_timeline"] = otio.adapters.read_from_file( file_path, **kwargs) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py index d0d36bb717..3237fbbe12 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py @@ -1,8 +1,12 @@ import os +from copy import deepcopy + import opentimelineio as otio import pyblish.api + from openpype import lib as plib -from copy import deepcopy +from openpype.pipeline.context_tools import get_current_project_asset + class CollectInstances(pyblish.api.InstancePlugin): """Collect instances from editorial's OTIO sequence""" @@ -48,7 +52,7 @@ class CollectInstances(pyblish.api.InstancePlugin): # get timeline otio data timeline = instance.data["otio_timeline"] - fps = plib.get_asset()["data"]["fps"] + fps = get_current_project_asset()["data"]["fps"] tracks = timeline.each_child( descended_from_type=otio.schema.Track diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py index 005157af62..ff7f60354e 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py @@ -3,8 +3,8 @@ import re import pyblish.api import openpype.api -from openpype import lib from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.context_tools import get_current_project_asset class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -27,7 +27,8 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): for pattern in self.skip_timelines_check): self.log.info("Skipping for {} task".format(instance.data["task"])) - asset_data = lib.get_asset(instance.data["asset"])["data"] + # TODO repace query with using 'instance.data["assetEntity"]' + asset_data = get_current_project_asset(instance.data["asset"])["data"] frame_start = asset_data["frameStart"] frame_end = asset_data["frameEnd"] handle_start = asset_data["handleStart"] diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index da2830bc52..1fe0bef462 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -8,13 +8,13 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection +from openpype.pipeline.context_tools import get_current_project_asset from openpype.pipeline import ( get_representation_path, AVALON_CONTAINER_ID ) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline -from openpype.api import get_asset class AnimationFBXLoader(plugin.Loader): @@ -53,6 +53,8 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None + asset_doc = get_current_project_asset(fields=["data.fps"]) + task.set_editor_property('filename', self.fname) task.set_editor_property('destination_path', asset_dir) task.set_editor_property('destination_name', asset_name) @@ -80,7 +82,7 @@ class AnimationFBXLoader(plugin.Loader): 
task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -246,6 +248,7 @@ class AnimationFBXLoader(plugin.Loader): def update(self, container, representation): name = container["asset_name"] source_path = get_representation_path(representation) + asset_doc = get_current_project_asset(fields=["data.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() @@ -279,7 +282,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 3f16a68ead..01d589c69b 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -20,7 +20,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, legacy_io, ) -from openpype.api import get_asset +from openpype.pipeline.context_tools import get_current_project_asset from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline @@ -225,6 +225,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" + asset_doc = get_current_project_asset() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() @@ -259,7 +260,7 @@ class LayoutLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( From b5d7ae0d2a38d93ba5014c9a1aec455b9ca982ce Mon Sep 17 00:00:00 2001 From: jrsndlr Date: Fri, 15 Jul 2022 16:29:05 +0200 Subject: [PATCH 361/785] no need to copy codec and pixel format --- openpype/plugins/publish/extract_review_slate.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 737b7db295..2edaf10e6b 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -324,8 +324,6 @@ class ExtractReviewSlate(openpype.api.Extractor): copy_args = ( "-metadata", "-metadata:s:v:0", - "-codec:v", - "-pixfmt", "-b:v", "-b:a", ) From e8b4a3389e9ac0095bdafcdd008398dc69aac38c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 16:33:47 +0200 Subject: [PATCH 362/785] added comment do harmony plugin --- .../hosts/harmony/plugins/publish/validate_scene_settings.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py index 4c3a6c4465..936533abd6 100644 --- a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py @@ -55,6 +55,10 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + + # TODO 'get_asset_settings' could expect asset document as argument + # which is available on 'context.data["assetEntity"]' + # - the same approach can be used in 'ValidateSceneSettingsRepair' expected_settings = harmony.get_asset_settings() self.log.info("scene settings from DB:".format(expected_settings)) From 6f521242cbaa88a4bae403fa7b23c4d9faa9cd18 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:01:00 +0200 Subject: [PATCH 363/785] implemented functions to filter containers into 4 possible categories --- openpype/pipeline/load/__init__.py | 4 + openpype/pipeline/load/utils.py | 132 +++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+) diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index 6e7612d4c1..e05dde2f9c 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -24,6 +24,8 @@ from .utils import ( loaders_from_repre_context, loaders_from_representation, + + filter_containers, ) from .plugins import ( @@ -66,6 +68,8 @@ __all__ = ( "loaders_from_repre_context", "loaders_from_representation", + "filter_containers", + # plugins.py "LoaderPlugin", "SubsetLoaderPlugin", diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 2c213aff6f..68850c095a 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -4,6 +4,7 @@ import copy import getpass import logging import inspect +import collections import numbers from openpype.client import ( @@ -15,6 +16,7 @@ from openpype.client import ( get_last_version_by_subset_id, get_hero_version_by_subset_id, get_version_by_name, + get_last_versions, get_representations, get_representation_by_id, get_representation_by_name, @@ -28,6 +30,11 @@ from openpype.pipeline import ( log = logging.getLogger(__name__) +ContainersFilterResult = collections.namedtuple( + "ContainersFilterResult", + ["latest", "outdated", "not_foud", "invalid"] +) + class HeroVersionType(object): def __init__(self, version): @@ -685,3 +692,128 @@ def loaders_from_representation(loaders, representation): context = get_representation_context(representation) return loaders_from_repre_context(loaders, context) + + +def filter_containers(containers, project_name): + """Filter containers and split them into 4 categories. + + Categories are 'latest', 'outdated', 'invalid' and 'not_found'. + The 'lastest' containers are from last version, 'outdated' are not, + 'invalid' are invalid containers (invalid content) and 'not_foud' has + some missing entity in database. + + Args: + containers (list[dict]): List of containers referenced into scene. + project_name (str): Name of project in which context shoud look for + versions. + + Returns: + ContainersFilterResult: Named tuple with 'latest', 'outdated', + 'invalid' and 'not_found' containers. 
+ """ + + outdated_containers = [] + uptodate_containers = [] + not_found_containers = [] + invalid_containers = [] + output = ContainersFilterResult( + uptodate_containers, + outdated_containers, + not_found_containers, + invalid_containers + ) + # Query representation docs to get it's version ids + repre_ids = { + container["representation"] + for container in containers + if container["representation"] + } + if not repre_ids: + if containers: + invalid_containers.extend(containers) + return output + + repre_docs = get_representations( + project_name, + representation_ids=repre_ids, + fields=["_id", "parent"] + ) + # Store representations by stringified representation id + repre_docs_by_str_id = {} + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + repre_id = str(repre_doc["_id"]) + version_id = repre_doc["parent"] + repre_docs_by_str_id[repre_id] = repre_doc + repre_docs_by_version_id[version_id].append(repre_doc) + + # Query version docs to get it's subset ids + # - also query hero version to be able identify if representation + # belongs to existing version + version_docs = get_versions( + project_name, + version_ids=repre_docs_by_version_id.keys(), + hero=True, + fields=["_id", "parent", "type"] + ) + verisons_by_id = {} + versions_by_subset_id = collections.defaultdict(list) + hero_version_ids = set() + for version_doc in version_docs: + version_id = version_doc["_id"] + # Store versions by their ids + verisons_by_id[version_id] = version_doc + # There's no need to query subsets for hero versions + # - they are considered as latest? + if version_doc["type"] == "hero_version": + hero_version_ids.add(version_id) + continue + subset_id = version_doc["parent"] + versions_by_subset_id[subset_id].append(version_doc) + + last_versions = get_last_versions( + project_name, + subset_ids=versions_by_subset_id.keys(), + fields=["_id"] + ) + # Figure out which versions are outdated + outdated_version_ids = set() + for subset_id, last_version_doc in last_versions.items(): + for version_doc in versions_by_subset_id[subset_id]: + version_id = version_doc["_id"] + if version_id != last_version_doc["_id"]: + outdated_version_ids.add(version_id) + + # Based on all collected data figure out which containers are outdated + # - log out if there are missing representation or version documents + for container in containers: + container_name = container["objectName"] + repre_id = container["representation"] + if not repre_id: + invalid_containers.append(container) + continue + + repre_doc = repre_docs_by_str_id.get(repre_id) + if not repre_doc: + log.debug(( + "Container '{}' has an invalid representation." + " It is missing in the database." + ).format(container_name)) + not_found_containers.append(container) + continue + + version_id = repre_doc["parent"] + if version_id in outdated_version_ids: + outdated_containers.append(container) + + elif version_id not in verisons_by_id: + log.debug(( + "Representation on container '{}' has an invalid version." + " It is missing in the database." 
+ ).format(container_name)) + not_found_containers.append(container) + + else: + uptodate_containers.append(container) + + return output From 1ec708ce7f5786a6cff9bbd490beff0872553d01 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:01:28 +0200 Subject: [PATCH 364/785] added helper functions to get outdated containers or just check if there are any outdated --- openpype/pipeline/load/__init__.py | 4 ++++ openpype/pipeline/load/utils.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index e05dde2f9c..e46d9f152b 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -25,6 +25,8 @@ from .utils import ( loaders_from_repre_context, loaders_from_representation, + any_outdated_containers, + get_outdated_containers, filter_containers, ) @@ -68,6 +70,8 @@ __all__ = ( "loaders_from_repre_context", "loaders_from_representation", + "any_outdated_containers", + "get_outdated_containers", "filter_containers", # plugins.py diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 68850c095a..a9aa240ff6 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -694,6 +694,35 @@ def loaders_from_representation(loaders, representation): return loaders_from_repre_context(loaders, context) +def any_outdated_containers(host=None, project_name=None): + """Check if there are any outdated containers in scene.""" + + if get_outdated_containers(host, project_name): + return True + return False + + +def get_outdated_containers(host=None, project_name=None): + """Collect outdated containers from host scene. + + Currently registered host and project in global session are used if + arguments are not passed. + + Args: + host (ModuleType): Host implementation with 'ls' function available. + project_name (str): Name of project in which context we are. + """ + + if host is None: + from openpype.pipeline import registered_host + host = registered_host() + + if project_name is None: + project_name = legacy_io.active_project() + containers = host.ls() + return filter_containers(containers, project_name).outdated + + def filter_containers(containers, project_name): """Filter containers and split them into 4 categories. 
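A minimal usage sketch for the helpers added above (an illustration only, assuming a registered host whose 'ls()' returns container dictionaries and an active 'legacy_io' session; everything except the imported names is hypothetical):

    from openpype.pipeline import legacy_io, registered_host
    from openpype.pipeline.load import filter_containers

    host = registered_host()
    project_name = legacy_io.active_project()

    # Split the scene containers into the categories exposed by the named tuple.
    result = filter_containers(host.ls(), project_name)
    for container in result.outdated:
        # 'objectName' is the key the filtering itself reads from each container.
        print("Outdated container:", container["objectName"])
    for container in result.invalid:
        print("Container with invalid content:", container.get("objectName"))
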
From f3b628843b5f8e986d4d52483bbaf9a94a0440b4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:06:35 +0200 Subject: [PATCH 365/785] marked 'any_outdated' in 'openpype.lib' as deprecated --- openpype/lib/avalon_context.py | 26 +++----------------------- 1 file changed, 3 insertions(+), 23 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 76ed6cbbd3..b3113ce188 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -19,7 +19,6 @@ from openpype.client import ( get_last_versions, get_last_version_by_subset_id, get_representations, - get_representation_by_id, get_workfile_info, ) from openpype.settings import ( @@ -208,32 +207,13 @@ def is_latest(representation): return version["_id"] == last_version["_id"] -@with_pipeline_io +@deprecated("openpype.pipeline.load.any_outdated_containers") def any_outdated(): """Return whether the current scene has any outdated content""" - from openpype.pipeline import registered_host - project_name = legacy_io.active_project() - checked = set() - host = registered_host() - for container in host.ls(): - representation = container['representation'] - if representation in checked: - continue + from openpype.pipeline.load import any_outdated_containers - representation_doc = get_representation_by_id( - project_name, representation, fields=["parent"] - ) - if representation_doc and not is_latest(representation_doc): - return True - elif not representation_doc: - log.debug("Container '{objectName}' has an invalid " - "representation, it is missing in the " - "database".format(**container)) - - checked.add(representation) - - return False + return any_outdated_containers() @with_pipeline_io From 6e90984528199eb697d55ba2c8fe0df8d7cec87b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:07:22 +0200 Subject: [PATCH 366/785] replace usage of 'any_outdated' with 'any_outdated_containers' --- openpype/hosts/aftereffects/api/pipeline.py | 4 ++-- openpype/hosts/houdini/api/pipeline.py | 4 ++-- openpype/hosts/maya/api/pipeline.py | 4 ++-- openpype/hosts/photoshop/api/pipeline.py | 5 ++--- openpype/plugins/publish/validate_containers.py | 4 ++-- 5 files changed, 10 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 0bc47665b0..c13c22ced5 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -1,5 +1,4 @@ import os -import sys from Qt import QtWidgets @@ -15,6 +14,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, legacy_io, ) +from openpype.pipeline.load import any_outdated_containers import openpype.hosts.aftereffects from openpype.lib import register_event_callback @@ -136,7 +136,7 @@ def ls(): def check_inventory(): """Checks loaded containers if they are of highest version""" - if not lib.any_outdated(): + if not any_outdated_containers(): return # Warn about outdated containers. 
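The host hunks in this commit all follow the same shape: when a scene is opened (or its inventory is checked) the integration asks the new helper whether anything is outdated and only then bothers the artist. A hedged sketch of that shared pattern, where the logger and the follow-up dialog are illustrative and each host wires the function to its own open event and shows its own popup:

    import logging

    from openpype.pipeline.load import any_outdated_containers

    log = logging.getLogger(__name__)


    def on_open():
        # Boolean check only; the detailed queries happen in filter_containers().
        if not any_outdated_containers():
            return
        log.warning("Scene has outdated content.")
        # ... each host then opens its own popup or the Scene Inventory tool here
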
diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 7048accceb..b5f5459392 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -12,13 +12,13 @@ from openpype.pipeline import ( register_loader_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import any_outdated_containers import openpype.hosts.houdini from openpype.hosts.houdini.api import lib from openpype.lib import ( register_event_callback, emit_event, - any_outdated, ) from .lib import get_asset_fps @@ -245,7 +245,7 @@ def on_open(): # ensure it is using correct FPS for the asset lib.validate_fps() - if any_outdated(): + if any_outdated_containers(): from openpype.widgets import popup log.warning("Scene has outdated content.") diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index d08e8d1926..f565f6a308 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -13,7 +13,6 @@ from openpype.host import HostBase, IWorkfileHost, ILoadHost import openpype.hosts.maya from openpype.tools.utils import host_tools from openpype.lib import ( - any_outdated, register_event_callback, emit_event ) @@ -28,6 +27,7 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import any_outdated_containers from openpype.hosts.maya.lib import copy_workspace_mel from . import menu, lib from .workio import ( @@ -470,7 +470,7 @@ def on_open(): lib.validate_fps() lib.fix_incompatible_containers() - if any_outdated(): + if any_outdated_containers(): log.warning("Scene has outdated content.") # Find maya main window diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 20a6e3169f..ee150d1808 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -1,6 +1,5 @@ import os from Qt import QtWidgets -from bson.objectid import ObjectId import pyblish.api @@ -13,8 +12,8 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - registered_host, ) +from openpype.pipeline.load import any_outdated_containers import openpype.hosts.photoshop from . import lib @@ -30,7 +29,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") def check_inventory(): - if not lib.any_outdated(): + if not any_outdated_containers(): return # Warn about outdated containers. 
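Where a host needs the concrete list rather than a yes/no answer, for instance to name the outdated subsets in its warning, 'get_outdated_containers()' serves the same purpose. A small sketch, with the message formatting purely illustrative:

    import logging

    from openpype.pipeline.load import get_outdated_containers

    log = logging.getLogger(__name__)


    def check_inventory():
        outdated = get_outdated_containers()
        if not outdated:
            return
        # 'namespace'/'objectName' come from the container schema; read defensively.
        names = ", ".join(
            container.get("namespace") or container.get("objectName", "<unknown>")
            for container in outdated
        )
        log.warning("Outdated containers in the scene: %s", names)
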
diff --git a/openpype/plugins/publish/validate_containers.py b/openpype/plugins/publish/validate_containers.py index ce91bd3396..7732ec5ea9 100644 --- a/openpype/plugins/publish/validate_containers.py +++ b/openpype/plugins/publish/validate_containers.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.lib +from openpype.pipeline.load import any_outdated_containers class ShowInventory(pyblish.api.Action): @@ -24,5 +24,5 @@ class ValidateContainers(pyblish.api.ContextPlugin): actions = [ShowInventory] def process(self, context): - if openpype.lib.any_outdated(): + if any_outdated_containers(): raise ValueError("There are outdated containers in the scene.") From b0ce3e851ddc03850f1c05bc3a7eda78a7621708 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:25:53 +0200 Subject: [PATCH 367/785] added function to check if version is latest --- openpype/client/__init__.py | 4 ++++ openpype/client/entities.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 97e6755d09..4b8213a8ac 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -25,6 +25,8 @@ from .entities import ( get_last_version_by_subset_name, get_output_link_versions, + version_is_latest, + get_representation_by_id, get_representation_by_name, get_representations, @@ -66,6 +68,8 @@ __all__ = ( "get_last_version_by_subset_name", "get_output_link_versions", + "version_is_latest", + "get_representation_by_id", "get_representation_by_name", "get_representations", diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 9d65355d1b..468f569c7f 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -557,6 +557,42 @@ def get_version_by_name(project_name, version, subset_id, fields=None): return conn.find_one(query_filter, _prepare_fields(fields)) +def version_is_latest(project_name, version_id): + """Is version the latest from it's subset. + + Note: + Hero versions are considered as latest. + + Todo: + Maybe raise exception when version was not found? + + Args: + project_name (str):Name of project where to look for queried entities. + version_id (str|ObjectId): Version id which is checked. + + Returns: + bool: True if is latest version from subset else False. + """ + + version_id = _convert_id(version_id) + if not version_id: + return False + version_doc = get_version_by_id( + project_name, version_id, fields=["_id", "type", "parent"] + ) + # What to de when version is not found? 
+ if not version_doc: + return False + + if version_doc["type"] == "hero_version": + return True + + last_version = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) + return last_version["_id"] == version_id + + def _get_versions( project_name, subset_ids=None, From a081a0f3e1e21f6ced4ff4ee3b74d40cd97e788e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Jul 2022 17:43:05 +0200 Subject: [PATCH 368/785] Added wrapper around cmds.setAttr Logs and captures exception when attribute is not possible to set (when locked) --- openpype/vendor/python/common/capture.py | 32 ++++++++++++++++-------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 4d9e1da3e4..71b86a5f1a 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -403,7 +403,7 @@ def apply_view(panel, **options): camera_options = options.get("camera_options", {}) _iteritems = getattr(camera_options, "iteritems", camera_options.items) for key, value in _iteritems: - cmds.setAttr("{0}.{1}".format(camera, key), value) + _safe_setAttr("{0}.{1}".format(camera, key), value) # Viewport options viewport_options = options.get("viewport_options", {}) @@ -417,7 +417,7 @@ def apply_view(panel, **options): ) for key, value in _iteritems(): attr = "hardwareRenderingGlobals.{0}".format(key) - cmds.setAttr(attr, value) + _safe_setAttr(attr, value) def parse_active_panel(): @@ -551,10 +551,10 @@ def apply_scene(**options): cmds.playbackOptions(maxTime=options["end_frame"]) if "width" in options: - cmds.setAttr("defaultResolution.width", options["width"]) + _safe_setAttr("defaultResolution.width", options["width"]) if "height" in options: - cmds.setAttr("defaultResolution.height", options["height"]) + _safe_setAttr("defaultResolution.height", options["height"]) if "compression" in options: cmds.optionVar( @@ -665,7 +665,7 @@ def _applied_camera_options(options, panel): _iteritems = getattr(options, "iteritems", options.items) for opt, value in _iteritems(): - cmds.setAttr(camera + "." + opt, value) + _safe_setAttr(camera + "." + opt, value) try: yield @@ -673,7 +673,7 @@ def _applied_camera_options(options, panel): if old_options: _iteritems = getattr(old_options, "iteritems", old_options.items) for opt, value in _iteritems(): - cmds.setAttr(camera + "." + opt, value) + _safe_setAttr(camera + "." + opt, value) @contextlib.contextmanager @@ -760,7 +760,7 @@ def _applied_viewport2_options(options): # Apply settings _iteritems = getattr(options, "iteritems", options.items) for opt, value in _iteritems(): - cmds.setAttr("hardwareRenderingGlobals." + opt, value) + _safe_setAttr("hardwareRenderingGlobals." + opt, value) try: yield @@ -768,7 +768,7 @@ def _applied_viewport2_options(options): # Restore previous settings _iteritems = getattr(original, "iteritems", original.items) for opt, value in _iteritems(): - cmds.setAttr("hardwareRenderingGlobals." + opt, value) + _safe_setAttr("hardwareRenderingGlobals." 
+ opt, value) @contextlib.contextmanager @@ -802,14 +802,14 @@ def _maintain_camera(panel, camera): else: state = dict((camera, cmds.getAttr(camera + ".rnd")) for camera in cmds.ls(type="camera")) - cmds.setAttr(camera + ".rnd", True) + _safe_setAttr(camera + ".rnd", True) try: yield finally: _iteritems = getattr(state, "iteritems", state.items) for camera, renderable in _iteritems(): - cmds.setAttr(camera + ".rnd", renderable) + _safe_setAttr(camera + ".rnd", renderable) @contextlib.contextmanager @@ -846,6 +846,18 @@ def _in_standalone(): return not hasattr(cmds, "about") or cmds.about(batch=True) +def _safe_setAttr(*args, **kwargs): + """Wrapper to handle failures when attribute is locked. + + Temporary hotfix until better approach (store value, unlock, set new, + return old, lock again) is implemented. + """ + try: + cmds.setAttr(*args, **kwargs) + except RuntimeError: + print("Cannot setAttr {}!".format(args)) + + # -------------------------------- # # Apply version specific settings From 95eb83d8e05749a430d04c22a6b0486b983ba315 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:45:45 +0200 Subject: [PATCH 369/785] use 'get_outdated_containers' in harmony --- openpype/hosts/harmony/api/pipeline.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 86b5753f7e..3246f1add9 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -5,16 +5,15 @@ import logging import pyblish.api from openpype import lib -from openpype.client import get_representation_by_id from openpype.lib import register_event_callback from openpype.pipeline import ( - legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import get_outdated_containers import openpype.hosts.harmony import openpype.hosts.harmony.api as harmony @@ -105,16 +104,7 @@ def check_inventory(): in Harmony. 
""" - project_name = legacy_io.active_project() - outdated_containers = [] - for container in ls(): - representation_id = container['representation'] - representation_doc = get_representation_by_id( - project_name, representation_id, fields=["parent"] - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - + outdated_containers = get_outdated_containers() if not outdated_containers: return From c8d18dafa1a9366ddf07f1451e8b926533fdf07a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 17:46:21 +0200 Subject: [PATCH 370/785] 'is_latest' moved to pipeline as 'is_representation_from_latest' --- .../harmony/plugins/load/load_background.py | 9 +++------ .../plugins/load/load_imagesequence.py | 4 ++-- .../harmony/plugins/load/load_template.py | 4 ++-- openpype/lib/avalon_context.py | 19 +++---------------- openpype/pipeline/context_tools.py | 15 +++++++++++++++ 5 files changed, 25 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/harmony/plugins/load/load_background.py b/openpype/hosts/harmony/plugins/load/load_background.py index 9c01fe3cd8..9e9fcbfa32 100644 --- a/openpype/hosts/harmony/plugins/load/load_background.py +++ b/openpype/hosts/harmony/plugins/load/load_background.py @@ -5,8 +5,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib copy_files = """function copyFile(srcFilename, dstFilename) @@ -280,9 +280,7 @@ class BackgroundLoader(load.LoaderPlugin): ) def update(self, container, representation): - path = get_representation_path(representation) - with open(path) as json_file: data = json.load(json_file) @@ -300,10 +298,9 @@ class BackgroundLoader(load.LoaderPlugin): bg_folder = os.path.dirname(path) - path = get_representation_path(representation) - print(container) + is_latest = is_representation_from_latest(representation["parent"]) for layer in sorted(layers): file_to_import = [ os.path.join(bg_folder, layer).replace("\\", "/") @@ -347,7 +344,7 @@ class BackgroundLoader(load.LoaderPlugin): } %s """ % (sig, sig) - if openpype.lib.is_latest(representation): + if is_latest: harmony.send({"function": func, "args": [node, "green"]}) else: harmony.send({"function": func, "args": [node, "red"]}) diff --git a/openpype/hosts/harmony/plugins/load/load_imagesequence.py b/openpype/hosts/harmony/plugins/load/load_imagesequence.py index 18695438d5..8d6421a6aa 100644 --- a/openpype/hosts/harmony/plugins/load/load_imagesequence.py +++ b/openpype/hosts/harmony/plugins/load/load_imagesequence.py @@ -10,8 +10,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib class ImageSequenceLoader(load.LoaderPlugin): @@ -109,7 +109,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) # Colour node. 
- if openpype.lib.is_latest(representation): + if is_representation_from_latest(representation["parent"]): harmony.send( { "function": "PypeHarmony.setColor", diff --git a/openpype/hosts/harmony/plugins/load/load_template.py b/openpype/hosts/harmony/plugins/load/load_template.py index c6dc9d913b..8ddd3934f7 100644 --- a/openpype/hosts/harmony/plugins/load/load_template.py +++ b/openpype/hosts/harmony/plugins/load/load_template.py @@ -10,8 +10,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib class TemplateLoader(load.LoaderPlugin): @@ -83,7 +83,7 @@ class TemplateLoader(load.LoaderPlugin): self_name = self.__class__.__name__ update_and_replace = False - if openpype.lib.is_latest(representation): + if is_representation_from_latest(representation["parent"]): self._set_green(node) else: self._set_red(node) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b3113ce188..1108791953 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -15,7 +15,6 @@ from openpype.client import ( get_asset_by_name, get_subset_by_name, get_subsets, - get_version_by_id, get_last_versions, get_last_version_by_subset_id, get_representations, @@ -179,7 +178,7 @@ def with_pipeline_io(func): return wrapped -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.is_representation_from_latest") def is_latest(representation): """Return whether the representation is from latest version @@ -190,21 +189,9 @@ def is_latest(representation): bool: Whether the representation is of latest version. """ - project_name = legacy_io.active_project() - version = get_version_by_id( - project_name, - representation["parent"], - fields=["_id", "type", "parent"] - ) - if version["type"] == "hero_version": - return True + from openpype.pipeline.context_tools import is_representation_from_latest - # Get highest version under the parent - last_version = get_last_version_by_subset_id( - project_name, version["parent"], fields=["_id"] - ) - - return version["_id"] == last_version["_id"] + return is_representation_from_latest(representation) @deprecated("openpype.pipeline.load.any_outdated_containers") diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index e719e46514..e2f9df5dae 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -10,6 +10,7 @@ import pyblish.api from pyblish.lib import MessageHandler import openpype +from openpype.client import version_is_latest from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings from openpype.lib import filter_pyblish_plugins @@ -304,3 +305,17 @@ def debug_host(): }) return host + + +def is_representation_from_latest(representation): + """Return whether the representation is from latest version + + Args: + representation (dict): The representation document from the database. + + Returns: + bool: Whether the representation is of latest version. 
+ """ + + project_name = legacy_io.active_project() + return version_is_latest(project_name, representation["parent"]) From 3cc78c2f98d3fd652dbe9d865d54df86bf6cd688 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 17:56:30 +0200 Subject: [PATCH 371/785] trayp: rename `invisible` to `hidden` --- openpype/hosts/traypublisher/api/plugin.py | 4 ++-- .../hosts/traypublisher/plugins/create/create_editorial.py | 4 ++-- openpype/pipeline/create/__init__.py | 4 ++-- openpype/pipeline/create/creator_plugins.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index cb2f86eed7..3a268be55d 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,6 +1,6 @@ from openpype.pipeline.create import ( Creator, - InvisibleCreator, + HiddenCreator, CreatedInstance ) from openpype.lib import ( @@ -15,7 +15,7 @@ from .pipeline import ( ) -class InvisibleTrayPublishCreator(InvisibleCreator): +class HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index ffff5de70a..8f7101385c 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -8,7 +8,7 @@ from openpype.client import ( ) from openpype.hosts.traypublisher.api.plugin import ( TrayPublishCreator, - InvisibleTrayPublishCreator + HiddenTrayPublishCreator ) from openpype.hosts.traypublisher.api.editorial import ( ShotMetadataSover @@ -60,7 +60,7 @@ CLIP_ATTR_DEFS = [ ] -class EditorialClipInstanceCreatorBase(InvisibleTrayPublishCreator): +class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): host_name = "traypublisher" def __init__( diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index cd01c53cf5..bd196ccfd1 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -7,7 +7,7 @@ from .creator_plugins import ( BaseCreator, Creator, AutoCreator, - InvisibleCreator, + HiddenCreator, discover_creator_plugins, discover_legacy_creator_plugins, @@ -36,7 +36,7 @@ __all__ = ( "BaseCreator", "Creator", "AutoCreator", - "InvisibleCreator", + "HiddenCreator", "discover_creator_plugins", "discover_legacy_creator_plugins", diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 4d953a0605..8cb161de20 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -416,7 +416,7 @@ class Creator(BaseCreator): return self.pre_create_attr_defs -class InvisibleCreator(BaseCreator): +class HiddenCreator(BaseCreator): @abstractmethod def create(self, instance_data, source_data): pass From 29de28cb5371ced19fdf35368ce8e4a9f4f8b074 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Jul 2022 17:57:05 +0200 Subject: [PATCH 372/785] trayp: editorial publishing wip --- openpype/hosts/traypublisher/api/editorial.py | 1 + .../plugins/create/create_editorial.py | 49 +++- .../plugins/publish/collect_clip_instances.py | 32 +++ .../publish/collect_editorial_instances.py | 8 +- .../publish/collect_editorial_resources.py | 271 ++++++++++++++++++ .../plugins/publish/collect_shot_instances.py | 163 +++++++++++ .../publish/extract_trim_video_audio.py | 2 +- 
.../plugins/publish/validate_asset_docs.py | 4 + 8 files changed, 516 insertions(+), 14 deletions(-) create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py rename openpype/{hosts/standalonepublisher => }/plugins/publish/extract_trim_video_audio.py (98%) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index 713f1b5c6c..948e05ec61 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -4,6 +4,7 @@ from copy import deepcopy from openpype.client import get_asset_by_id from openpype.pipeline.create import CreatorError + class ShotMetadataSover: """Collecting hierarchy context from `parents` and `hierarchy` data present in `clip` family instances coming from the request json data file diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 8f7101385c..b87253a705 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -232,14 +232,10 @@ or updating already created. Publishing will create OTIO file. def _create_otio_instance(self, subset_name, data, pre_create_data): # get path of sequence file_path_data = pre_create_data["sequence_filepath_data"] + media_path_data = pre_create_data["media_filepaths_data"] - if len(file_path_data["filenames"]) == 0: - raise FileExistsError("File path was not added") - - file_path = os.path.join( - file_path_data["directory"], file_path_data["filenames"][0]) - - self.log.info(f"file_path: {file_path}") + file_path = self._get_path_from_file_data(file_path_data) + media_path = self._get_path_from_file_data(media_path_data) # get editorial sequence file into otio timeline object extension = os.path.splitext(file_path)[1] @@ -256,6 +252,7 @@ or updating already created. Publishing will create OTIO file. # Pass precreate data to creator attributes data.update({ "sequenceFilePath": file_path, + "editorialSourcePath": media_path, "otioTimeline": otio.adapters.write_to_string(otio_timeline) }) @@ -263,6 +260,18 @@ or updating already created. Publishing will create OTIO file. return otio_timeline + def _get_path_from_file_data(self, file_path_data): + # TODO: just temporarly solving only one media file + if isinstance(file_path_data, list): + file_path_data = file_path_data.pop() + + if len(file_path_data["filenames"]) == 0: + raise FileExistsError( + f"File path was not added: {file_path_data}") + + return os.path.join( + file_path_data["directory"], file_path_data["filenames"][0]) + def _get_clip_instances( self, otio_timeline, @@ -303,11 +312,14 @@ or updating already created. Publishing will create OTIO file. "instance_label": None, "instance_id": None } - self.log.info( - f"Creating subsets from presets: \n{pformat(family_presets)}") + self.log.info(( + "Creating subsets from presets: \n" + f"{pformat(family_presets)}" + )) for _fpreset in family_presets: instance = self._make_subset_instance( + clip, _fpreset, deepcopy(base_instance_data), parenting_data @@ -316,6 +328,7 @@ or updating already created. Publishing will create OTIO file. 
def _make_subset_instance( self, + clip, _fpreset, future_instance_data, parenting_data @@ -329,6 +342,8 @@ or updating already created. Publishing will create OTIO file. # add file extension filter only if it is not shot family if family == "shot": + future_instance_data["otioClip"] = ( + otio.adapters.write_to_string(clip)) c_instance = self.create_context.creators[ "editorial_shot"].create( future_instance_data) @@ -458,6 +473,7 @@ or updating already created. Publishing will create OTIO file. # TODO: should loockup shot name for update "asset": parent_asset_name, "task": "", + # parent time properties "trackStartFrame": track_start_frame, "timelineOffset": timeline_offset, @@ -568,7 +584,20 @@ or updating already created. Publishing will create OTIO file. ".fcpxml" ], allow_sequences=False, - label="Filepath", + single_item=True, + label="Sequence file", + ), + FileDef( + "media_filepaths_data", + folders=False, + extensions=[ + ".mov", + ".mp4", + ".wav" + ], + allow_sequences=False, + single_item=False, + label="Media files", ), # TODO: perhpas better would be timecode and fps input NumberDef( diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py new file mode 100644 index 0000000000..e3dfb1512a --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py @@ -0,0 +1,32 @@ +from pprint import pformat +import pyblish.api + + +class CollectClipInstance(pyblish.api.InstancePlugin): + """Collect clip instances and resolve its parent""" + + label = "Collect Clip Instances" + order = pyblish.api.CollectorOrder + + hosts = ["traypublisher"] + families = ["plate", "review", "audio"] + + def process(self, instance): + creator_identifier = instance.data["creator_identifier"] + if "editorial" not in creator_identifier: + return + + instance.data["families"].append("clip") + + parent_instance_id = instance.data["parent_instance_id"] + edit_shared_data = instance.context.data["editorialSharedData"] + instance.data.update( + edit_shared_data[parent_instance_id] + ) + + if "editorialSourcePath" in instance.context.data.keys(): + instance.data["editorialSourcePath"] = ( + instance.context.data["editorialSourcePath"]) + instance.data["families"].append("trimming") + + self.log.debug(pformat(instance.data)) \ No newline at end of file diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py index c088709a61..e181d0abe5 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py @@ -4,11 +4,11 @@ import pyblish.api import opentimelineio as otio -class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): +class CollectEditorialInstance(pyblish.api.InstancePlugin): """Collect data for instances created by settings creators.""" label = "Collect Editorial Instances" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.1 hosts = ["traypublisher"] families = ["editorial"] @@ -27,6 +27,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): otio_timeline_string) instance.context.data["otioTimeline"] = otio_timeline + instance.context.data["editorialSourcePath"] = ( + instance.data["editorialSourcePath"]) self.log.info(fpath) @@ -41,6 +43,6 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): "files": 
os.path.basename(fpath) }) - self.log.debug("Created Simple Settings instance {}".format( + self.log.debug("Created Editorial Instance {}".format( pformat(instance.data) )) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py new file mode 100644 index 0000000000..33a852e7a5 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py @@ -0,0 +1,271 @@ +import os +import re +import tempfile +import pyblish.api +from copy import deepcopy +import clique + + +class CollectInstanceResources(pyblish.api.InstancePlugin): + """Collect instance's resources""" + + # must be after `CollectInstances` + order = pyblish.api.CollectorOrder + label = "Collect Editorial Resources" + hosts = ["standalonepublisher"] + families = ["clip"] + + def process(self, instance): + self.context = instance.context + self.log.info(f"Processing instance: {instance}") + self.new_instances = [] + subset_files = dict() + subset_dirs = list() + anatomy = self.context.data["anatomy"] + anatomy_data = deepcopy(self.context.data["anatomyData"]) + anatomy_data.update({"root": anatomy.roots}) + + subset = instance.data["subset"] + clip_name = instance.data["clipName"] + + editorial_source_root = instance.data["editorialSourceRoot"] + editorial_source_path = instance.data["editorialSourcePath"] + + # if `editorial_source_path` then loop through + if editorial_source_path: + # add family if mov or mp4 found which is longer for + # cutting `trimming` to enable `ExtractTrimmingVideoAudio` plugin + staging_dir = os.path.normpath( + tempfile.mkdtemp(prefix="pyblish_tmp_") + ) + instance.data["stagingDir"] = staging_dir + instance.data["families"] += ["trimming"] + return + + # if template pattern in path then fill it with `anatomy_data` + if "{" in editorial_source_root: + editorial_source_root = editorial_source_root.format( + **anatomy_data) + + self.log.debug(f"root: {editorial_source_root}") + # loop `editorial_source_root` and find clip name in folders + # and look for any subset name alternatives + for root, dirs, _files in os.walk(editorial_source_root): + # search only for directories related to clip name + correct_clip_dir = None + for _d_search in dirs: + # avoid all non clip dirs + if _d_search not in clip_name: + continue + # found correct dir for clip + correct_clip_dir = _d_search + + # continue if clip dir was not found + if not correct_clip_dir: + continue + + clip_dir_path = os.path.join(root, correct_clip_dir) + subset_files_items = list() + # list content of clip dir and search for subset items + for subset_item in os.listdir(clip_dir_path): + # avoid all items which are not defined as subsets by name + if subset not in subset_item: + continue + + subset_item_path = os.path.join( + clip_dir_path, subset_item) + # if it is dir store it to `subset_dirs` list + if os.path.isdir(subset_item_path): + subset_dirs.append(subset_item_path) + + # if it is file then store it to `subset_files` list + if os.path.isfile(subset_item_path): + subset_files_items.append(subset_item_path) + + if subset_files_items: + subset_files.update({clip_dir_path: subset_files_items}) + + # break the loop if correct_clip_dir was captured + # no need to cary on if correct folder was found + if correct_clip_dir: + break + + if subset_dirs: + # look all dirs and check for subset name alternatives + for _dir in subset_dirs: + instance_data = deepcopy( + {k: v for k, v in instance.data.items()}) + 
sub_dir = os.path.basename(_dir) + # if subset name is only alternative then create new instance + if sub_dir != subset: + instance_data = self.duplicate_instance( + instance_data, subset, sub_dir) + + # create all representations + self.create_representations( + os.listdir(_dir), instance_data, _dir) + + if sub_dir == subset: + self.new_instances.append(instance_data) + # instance.data.update(instance_data) + + if subset_files: + unique_subset_names = list() + root_dir = list(subset_files.keys()).pop() + files_list = subset_files[root_dir] + search_pattern = f"({subset}[A-Za-z0-9]+)(?=[\\._\\s])" + for _file in files_list: + pattern = re.compile(search_pattern) + match = pattern.findall(_file) + if not match: + continue + match_subset = match.pop() + if match_subset in unique_subset_names: + continue + unique_subset_names.append(match_subset) + + self.log.debug(f"unique_subset_names: {unique_subset_names}") + + for _un_subs in unique_subset_names: + instance_data = self.duplicate_instance( + instance.data, subset, _un_subs) + + # create all representations + self.create_representations( + [os.path.basename(f) for f in files_list + if _un_subs in f], + instance_data, root_dir) + + # remove the original instance as it had been used only + # as template and is duplicated + self.context.remove(instance) + + # create all instances in self.new_instances into context + for new_instance in self.new_instances: + _new_instance = self.context.create_instance( + new_instance["name"]) + _new_instance.data.update(new_instance) + + def duplicate_instance(self, instance_data, subset, new_subset): + + new_instance_data = dict() + for _key, _value in instance_data.items(): + new_instance_data[_key] = _value + if not isinstance(_value, str): + continue + if subset in _value: + new_instance_data[_key] = _value.replace( + subset, new_subset) + + self.log.info(f"Creating new instance: {new_instance_data['name']}") + self.new_instances.append(new_instance_data) + return new_instance_data + + def create_representations( + self, files_list, instance_data, staging_dir): + """ Create representations from Collection object + """ + # collecting frames for later frame start/end reset + frames = list() + # break down Collection object to collections and reminders + collections, remainder = clique.assemble(files_list) + # add staging_dir to instance_data + instance_data["stagingDir"] = staging_dir + # add representations to instance_data + instance_data["representations"] = list() + + collection_head_name = None + # loop through collections and create representations + for _collection in collections: + ext = _collection.tail[1:] + collection_head_name = _collection.head + frame_start = list(_collection.indexes)[0] + frame_end = list(_collection.indexes)[-1] + repre_data = { + "frameStart": frame_start, + "frameEnd": frame_end, + "name": ext, + "ext": ext, + "files": [item for item in _collection], + "stagingDir": staging_dir + } + + if instance_data.get("keepSequence"): + repre_data_keep = deepcopy(repre_data) + instance_data["representations"].append(repre_data_keep) + + if "review" in instance_data["families"]: + repre_data.update({ + "thumbnail": True, + "frameStartFtrack": frame_start, + "frameEndFtrack": frame_end, + "step": 1, + "fps": self.context.data.get("fps"), + "name": "review", + "tags": ["review", "ftrackreview", "delete"], + }) + instance_data["representations"].append(repre_data) + + # add to frames for frame range reset + frames.append(frame_start) + frames.append(frame_end) + + # loop through reminders 
and create representations + for _reminding_file in remainder: + ext = os.path.splitext(_reminding_file)[-1][1:] + if ext not in instance_data["extensions"]: + continue + if collection_head_name and ( + (collection_head_name + ext) not in _reminding_file + ) and (ext in ["mp4", "mov"]): + self.log.info(f"Skipping file: {_reminding_file}") + continue + frame_start = 1 + frame_end = 1 + + repre_data = { + "name": ext, + "ext": ext, + "files": _reminding_file, + "stagingDir": staging_dir + } + + # exception for thumbnail + if "thumb" in _reminding_file: + repre_data.update({ + 'name': "thumbnail", + 'thumbnail': True + }) + + # exception for mp4 preview + if ext in ["mp4", "mov"]: + frame_start = 0 + frame_end = ( + (instance_data["frameEnd"] - instance_data["frameStart"]) + + 1) + # add review ftrack family into families + for _family in ["review", "ftrack"]: + if _family not in instance_data["families"]: + instance_data["families"].append(_family) + repre_data.update({ + "frameStart": frame_start, + "frameEnd": frame_end, + "frameStartFtrack": frame_start, + "frameEndFtrack": frame_end, + "step": 1, + "fps": self.context.data.get("fps"), + "name": "review", + "thumbnail": True, + "tags": ["review", "ftrackreview", "delete"], + }) + + # add to frames for frame range reset only if no collection + if not collections: + frames.append(frame_start) + frames.append(frame_end) + + instance_data["representations"].append(repre_data) + + # reset frame start / end + instance_data["frameStart"] = min(frames) + instance_data["frameEnd"] = max(frames) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py new file mode 100644 index 0000000000..5abafa498d --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -0,0 +1,163 @@ +from pprint import pformat +import pyblish.api +import opentimelineio as otio + + +class CollectShotInstance(pyblish.api.InstancePlugin): + """Collect shot instances and resolve its parent""" + + label = "Collect Shot Instances" + order = pyblish.api.CollectorOrder - 0.09 + + hosts = ["traypublisher"] + families = ["shot"] + + SHARED_KEYS = [ + "asset", + "fps", + "frameStart", + "frameEnd", + "clipIn", + "clipOut", + "sourceIn", + "sourceOut" + ] + + def process(self, instance): + self.log.debug(pformat(instance.data)) + + creator_identifier = instance.data["creator_identifier"] + if "editorial" not in creator_identifier: + return + + # get otio clip object + otio_clip = self._get_otio_clip(instance) + instance.data["otioClip"] = otio_clip + + # first solve the inputs from creator attr + data = self._solve_inputs_to_data(instance) + instance.data.update(data) + + # distribute all shared keys to clips instances + self._distribute_shared_data(instance) + self._solve_hierarchy_context(instance) + + self.log.debug(pformat(instance.data)) + + def _get_otio_clip(self, instance): + context = instance.context + # convert otio clip from string to object + otio_clip_string = instance.data.pop("otioClip") + otio_clip = otio.adapters.read_from_string( + otio_clip_string) + + otio_timeline = context.data["otioTimeline"] + + clips = [ + clip for clip in otio_timeline.each_child( + descended_from_type=otio.schema.Clip) + if clip.name == otio_clip.name + ] + self.log.debug(otio_timeline.each_child( + descended_from_type=otio.schema.Clip)) + + otio_clip = clips.pop() + self.log.debug(f"__ otioclip.parent: {otio_clip.parent}") + + return otio_clip + + def 
_distribute_shared_data(self, instance): + context = instance.context + + instance_id = instance.data["instance_id"] + + if not context.data.get("editorialSharedData"): + context.data["editorialSharedData"] = {} + + context.data["editorialSharedData"][instance_id] = { + _k: _v for _k, _v in instance.data.items() + if _k in self.SHARED_KEYS + } + + def _solve_inputs_to_data(self, instance): + _cr_attrs = instance.data["creator_attributes"] + workfile_start_frame = _cr_attrs["workfile_start_frame"] + frame_start = _cr_attrs["frameStart"] + frame_end = _cr_attrs["frameEnd"] + frame_dur = frame_end - frame_start + + return { + "asset": _cr_attrs["asset_name"], + "fps": float(_cr_attrs["fps"]), + "handleStart": _cr_attrs["handle_start"], + "handleEnd": _cr_attrs["handle_end"], + "frameStart": workfile_start_frame, + "frameEnd": workfile_start_frame + frame_dur, + "clipIn": _cr_attrs["clipIn"], + "clipOut": _cr_attrs["clipOut"], + "sourceIn": _cr_attrs["sourceIn"], + "sourceOut": _cr_attrs["sourceOut"], + "workfileFrameStart": workfile_start_frame + } + + def _solve_hierarchy_context(self, instance): + context = instance.context + + final_context = ( + context.data["hierarchyContext"] + if context.data.get("hierarchyContext") + else {} + ) + + name = instance.data["asset"] + + # get handles + handle_start = int(instance.data["handleStart"]) + handle_end = int(instance.data["handleEnd"]) + + in_info = { + "entity_type": "Shot", + "custom_attributes": { + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + "clipIn": instance.data["clipIn"], + "clipOut": instance.data["clipOut"], + "fps": instance.data["fps"] + }, + "tasks": instance.data["tasks"] + } + + parents = instance.data.get('parents', []) + self.log.debug(f"parents: {pformat(parents)}") + + actual = {name: in_info} + + for parent in reversed(parents): + parent_name = parent["entity_name"] + next_dict = { + parent_name: { + "entity_type": parent["entity_type"], + "childs": actual + } + } + actual = next_dict + + final_context = self._update_dict(final_context, actual) + + # adding hierarchy context to instance + context.data["hierarchyContext"] = final_context + self.log.debug(pformat(final_context)) + + def _update_dict(self, ex_dict, new_dict): + for key in ex_dict: + if key in new_dict and isinstance(ex_dict[key], dict): + new_dict[key] = self._update_dict(ex_dict[key], new_dict[key]) + else: + if ex_dict.get(key) and new_dict.get(key): + continue + else: + new_dict[key] = ex_dict[key] + + return new_dict \ No newline at end of file diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py similarity index 98% rename from openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py rename to openpype/plugins/publish/extract_trim_video_audio.py index 51dc84e9a2..b0c30283d9 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py +++ b/openpype/plugins/publish/extract_trim_video_audio.py @@ -14,7 +14,7 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): # must be before `ExtractThumbnailSP` order = pyblish.api.ExtractorOrder - 0.01 label = "Extract Trim Video/Audio" - hosts = ["standalonepublisher"] + hosts = ["standalonepublisher", "traypublisher"] families = ["clip", "trimming"] # make sure it is enabled only if at least both families are available diff --git 
a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index bc1f9b9e6c..daeb442f28 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,6 +24,10 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") + elif "editorial" in instance.data.get("creator_identifier", ""): + # skip if it is editorial + self.log.info("Editorial instance is no need to check...") + else: raise PublishValidationError(( "Instance \"{}\" doesn't have asset document " From 1f18e5c9d35606276b1325f741662cd49e131a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 15 Jul 2022 18:06:37 +0200 Subject: [PATCH 373/785] :recycle: move submodules --- .gitmodules | 8 ++++---- vendor/powershell/BurntToast | 1 - vendor/powershell/PSWriteColor | 1 - vendor/powershell/README.md | 0 4 files changed, 4 insertions(+), 6 deletions(-) delete mode 160000 vendor/powershell/BurntToast delete mode 160000 vendor/powershell/PSWriteColor delete mode 100644 vendor/powershell/README.md diff --git a/.gitmodules b/.gitmodules index b515851c81..6a5d29ec02 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,7 @@ -[submodule "vendor/powershell/BurntToast"] - path = vendor/powershell/BurntToast +[submodule "tools/modules/powershell/BurntToast"] + path = tools/modules/powershell/PSWriteColor url = https://github.com/Windos/BurntToast.git -[submodule "vendor/powershell/PSWriteColor"] - path = vendor/powershell/PSWriteColor +[submodule "tools/modules/powershell/PSWriteColor"] + path = tools/modules/powershell/PSWriteColor url = https://github.com/EvotecIT/PSWriteColor.git diff --git a/vendor/powershell/BurntToast b/vendor/powershell/BurntToast deleted file mode 160000 index ae0acdd870..0000000000 --- a/vendor/powershell/BurntToast +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ae0acdd870a2fd8d9f0d147de22dc36d6c5e399e diff --git a/vendor/powershell/PSWriteColor b/vendor/powershell/PSWriteColor deleted file mode 160000 index 12eda384eb..0000000000 --- a/vendor/powershell/PSWriteColor +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 12eda384ebd7a7954e15855e312215c009c97114 diff --git a/vendor/powershell/README.md b/vendor/powershell/README.md deleted file mode 100644 index e69de29bb2..0000000000 From 3aa38ae0cc7e0799c6b510ad258c8fe7e3315bfe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 18:11:28 +0200 Subject: [PATCH 374/785] use 'get_last_version_by_subset_name' instead of 'get_latest_version' --- .../plugins/publish/submit_publish_job.py | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 9dd1428a63..9ef80efa50 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -10,7 +10,7 @@ import clique import pyblish.api -import openpype.api +from openpype.client import get_last_version_by_subset_name from openpype.pipeline import ( get_representation_path, legacy_io, @@ -333,8 +333,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # get latest version of subset # this will stop if subset wasn't published yet - version = openpype.api.get_latest_version(instance.data.get("asset"), - instance.data.get("subset")) + project_name = 
legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + instance.data.get("subset"), + asset_name=instance.data.get("asset") + ) + # get its files based on extension subset_resources = get_resources(version, representation.get("ext")) r_col, _ = clique.assemble(subset_resources) @@ -1013,9 +1018,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): prev_start = None prev_end = None - version = openpype.api.get_latest_version(asset_name=asset, - subset_name=subset - ) + project_name = legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + subset, + asset_name=asset + ) # Set prev start / end frames for comparison if not prev_start and not prev_end: @@ -1060,7 +1068,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): based on 'publish' template """ if not version: - version = openpype.api.get_latest_version(asset, subset) + project_name = legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + subset, + asset_name=asset + ) if version: version = int(version["name"]) + 1 else: From 1a61bd03e027053f39196314f7866e59e004a4e8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 18:12:39 +0200 Subject: [PATCH 375/785] marked 'get_latest_version' as deprecated --- openpype/lib/avalon_context.py | 37 +++---------------- .../tests/test_lib_restructuralization.py | 1 - 2 files changed, 6 insertions(+), 32 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 1108791953..be5f1117a7 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -17,6 +17,7 @@ from openpype.client import ( get_subsets, get_last_versions, get_last_version_by_subset_id, + get_last_version_by_subset_name, get_representations, get_workfile_info, ) @@ -286,7 +287,7 @@ def get_linked_assets(asset_doc): return list(get_assets(project_name, link_ids)) -@with_pipeline_io +@deprecated("openpype.client.get_last_version_by_subset_name") def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """Retrieve latest version from `asset_name`, and `subset_name`. 
@@ -307,6 +308,8 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): if not project_name: if not dbcon: + from openpype.pipeline import legacy_io + log.debug("Using `legacy_io` for query.") dbcon = legacy_io # Make sure is installed @@ -314,37 +317,9 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): project_name = dbcon.active_project() - log.debug(( - "Getting latest version for Project: \"{}\" Asset: \"{}\"" - " and Subset: \"{}\"" - ).format(project_name, asset_name, subset_name)) - - # Query asset document id by asset name - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - if not asset_doc: - log.info( - "Asset \"{}\" was not found in Database.".format(asset_name) - ) - return None - - subset_doc = get_subset_by_name( - project_name, subset_name, asset_doc["_id"] + return get_last_version_by_subset_name( + project_name, subset_name, asset_name=asset_name ) - if not subset_doc: - log.info( - "Subset \"{}\" was not found in Database.".format(subset_name) - ) - return None - - version_doc = get_last_version_by_subset_id( - project_name, subset_doc["_id"] - ) - if not version_doc: - log.info( - "Subset \"{}\" does not have any version yet.".format(subset_name) - ) - return None - return version_doc def get_workfile_template_key_from_context( diff --git a/openpype/tests/test_lib_restructuralization.py b/openpype/tests/test_lib_restructuralization.py index ccccc76a08..c8952e5a1c 100644 --- a/openpype/tests/test_lib_restructuralization.py +++ b/openpype/tests/test_lib_restructuralization.py @@ -22,7 +22,6 @@ def test_backward_compatibility(printer): from openpype.lib import any_outdated from openpype.lib import get_asset from openpype.lib import get_linked_assets - from openpype.lib import get_latest_version from openpype.lib import get_ffprobe_streams from openpype.hosts.fusion.lib import switch_item From 47079516f892a3bf2e550746c87a73e2ff389524 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 15 Jul 2022 18:14:38 +0200 Subject: [PATCH 376/785] :truck: set scripts to new path --- .gitmodules | 2 +- tools/build.ps1 | 2 +- tools/build_win_installer.ps1 | 2 +- tools/create_env.ps1 | 2 +- tools/create_zip.ps1 | 2 +- tools/fetch_thirdparty_libs.ps1 | 2 +- tools/make_docs.ps1 | 2 +- tools/modules/powershell/BurntToast | 1 + tools/run_mongo.ps1 | 2 +- tools/run_project_manager.ps1 | 2 +- tools/run_settings.ps1 | 2 +- tools/run_tests.ps1 | 2 +- tools/run_tray.ps1 | 2 +- 13 files changed, 13 insertions(+), 12 deletions(-) create mode 160000 tools/modules/powershell/BurntToast diff --git a/.gitmodules b/.gitmodules index 6a5d29ec02..dfd89cdb3c 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,5 +1,5 @@ [submodule "tools/modules/powershell/BurntToast"] - path = tools/modules/powershell/PSWriteColor + path = tools/modules/powershell/BurntToast url = https://github.com/Windos/BurntToast.git [submodule "tools/modules/powershell/PSWriteColor"] diff --git a/tools/build.ps1 b/tools/build.ps1 index efb41e6c1b..442328b8dc 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -33,7 +33,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" function Start-Progress { param([ScriptBlock]$code) 
diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1 index 8024a5a3b2..d7325edfc4 100644 --- a/tools/build_win_installer.ps1 +++ b/tools/build_win_installer.ps1 @@ -16,7 +16,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" function Start-Progress { param([ScriptBlock]$code) diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index c0cbe9775b..2b2f0c3904 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -30,7 +30,7 @@ $openpype_root = (Get-Item $script_dir).parent.FullName & git submodule update --init --recursive # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" function Exit-WithCode($exitcode) { diff --git a/tools/create_zip.ps1 b/tools/create_zip.ps1 index b4b66424ca..7b852b7c54 100644 --- a/tools/create_zip.ps1 +++ b/tools/create_zip.ps1 @@ -24,7 +24,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... 
diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 index 41a3585ff9..05eb073fdd 100644 --- a/tools/fetch_thirdparty_libs.ps1 +++ b/tools/fetch_thirdparty_libs.ps1 @@ -16,7 +16,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" $env:_INSIDE_OPENPYPE_TOOL = "1" diff --git a/tools/make_docs.ps1 b/tools/make_docs.ps1 index d356f081de..43ecd0c09c 100644 --- a/tools/make_docs.ps1 +++ b/tools/make_docs.ps1 @@ -49,7 +49,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" Write-Host $art -ForegroundColor DarkGreen diff --git a/tools/modules/powershell/BurntToast b/tools/modules/powershell/BurntToast new file mode 160000 index 0000000000..f58c9a26d6 --- /dev/null +++ b/tools/modules/powershell/BurntToast @@ -0,0 +1 @@ +Subproject commit f58c9a26d6ede30ecc7998e92b26974887e945fe diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index 934ce67181..b6b091a9d1 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -16,7 +16,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" $art = @" diff --git a/tools/run_project_manager.ps1 b/tools/run_project_manager.ps1 index 2932358c2a..c1813e4ed9 100644 --- a/tools/run_project_manager.ps1 +++ b/tools/run_project_manager.ps1 @@ -36,7 +36,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" $env:_INSIDE_OPENPYPE_TOOL = "1" diff --git a/tools/run_settings.ps1 b/tools/run_settings.ps1 index 918ea367ab..c74ae1ea3a 100644 --- a/tools/run_settings.ps1 +++ b/tools/run_settings.ps1 @@ -16,7 +16,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" $env:_INSIDE_OPENPYPE_TOOL = "1" diff --git a/tools/run_tests.ps1 b/tools/run_tests.ps1 index 7995c6a8e9..4fa598c413 100644 --- a/tools/run_tests.ps1 +++ b/tools/run_tests.ps1 @@ -16,7 +16,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + 
";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" function Exit-WithCode($exitcode) { # Only exit this host process if it's a child of another PowerShell parent process... diff --git a/tools/run_tray.ps1 b/tools/run_tray.ps1 index 7dee3d0064..40157c4e81 100644 --- a/tools/run_tray.ps1 +++ b/tools/run_tray.ps1 @@ -15,7 +15,7 @@ $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName # Install PSWriteColor to support colorized output to terminal -$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\vendor\powershell" +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" $env:_INSIDE_OPENPYPE_TOOL = "1" From fce30b519f7c6df3a59a2c8328d314c2cf7b2aa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 15 Jul 2022 18:15:31 +0200 Subject: [PATCH 377/785] :heavy_plus_sign: add PSWriteColor to right place --- tools/modules/powershell/PSWriteColor | 1 + 1 file changed, 1 insertion(+) create mode 160000 tools/modules/powershell/PSWriteColor diff --git a/tools/modules/powershell/PSWriteColor b/tools/modules/powershell/PSWriteColor new file mode 160000 index 0000000000..12eda384eb --- /dev/null +++ b/tools/modules/powershell/PSWriteColor @@ -0,0 +1 @@ +Subproject commit 12eda384ebd7a7954e15855e312215c009c97114 From 623e5d18c0d2290d68104e64656b9a6cd1d48602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 15 Jul 2022 18:19:21 +0200 Subject: [PATCH 378/785] :hammer: catch exception for toasts --- tools/build.ps1 | 6 ++++-- tools/build_win_installer.ps1 | 6 +++--- tools/create_env.ps1 | 7 ++++--- tools/fetch_thirdparty_libs.ps1 | 5 ++++- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/tools/build.ps1 b/tools/build.ps1 index 442328b8dc..195b2dc75e 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -189,6 +189,8 @@ Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray Set-Location -Path $current_dir $endTime = [int][double]::Parse((Get-Date -UFormat %s)) -New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $($endTime - $startTime) secs. You will find OpenPype and build log in build directory." - +try +{ + New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory." +} catch {} Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1 index d7325edfc4..b9d1ca2d3f 100644 --- a/tools/build_win_installer.ps1 +++ b/tools/build_win_installer.ps1 @@ -171,7 +171,7 @@ if ($LASTEXITCODE -ne 0) { Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray Set-Location -Path $current_dir - -New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done. You will find You will find OpenPype installer in '.\build' directory." - +try { + New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done. 
You will find You will find OpenPype installer in '.\build' directory." +} catch {} Write-Color -Text "*** ", "All done. You will find OpenPype installer in ", "'.\build'", " directory." -Color Green, Gray, White, Gray diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 2b2f0c3904..3f956e5c6a 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -180,7 +180,8 @@ if ($LASTEXITCODE -ne 0) { } $endTime = [int][double]::Parse((Get-Date -UFormat %s)) Set-Location -Path $current_dir - -New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Virtual environment created.", "All done in $($endTime - $startTime) secs." - +try +{ + New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Virtual environment created.", "All done in $( $endTime - $startTime ) secs." +} catch {} Write-Color -Text ">>> ", "Virtual environment created." -Color Green, White diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 index 05eb073fdd..4df007ad67 100644 --- a/tools/fetch_thirdparty_libs.ps1 +++ b/tools/fetch_thirdparty_libs.ps1 @@ -38,4 +38,7 @@ $startTime = [int][double]::Parse((Get-Date -UFormat %s)) & "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" $endTime = [int][double]::Parse((Get-Date -UFormat %s)) Set-Location -Path $current_dir -New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Dependencies downloaded", "All done in $($endTime - $startTime) secs." +try +{ + New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype", "Dependencies downloaded", "All done in $( $endTime - $startTime ) secs." +} catch {} \ No newline at end of file From 1c71fe206d2a531fbd921fd49368a74a966e3426 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 18:20:01 +0200 Subject: [PATCH 379/785] added interactive command to documentation --- website/docs/admin_openpype_commands.md | 1 + 1 file changed, 1 insertion(+) diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 53b4799d6e..53fc12410f 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -45,6 +45,7 @@ For more information [see here](admin_use.md#run-openpype). | publish | Pype takes JSON from provided path and use it to publish data in it. | [📑](#publish-arguments) | | extractenvironments | Extract environment variables for entered context to a json file. | [📑](#extractenvironments-arguments) | | run | Execute given python script within OpenPype environment. | [📑](#run-arguments) | +| interactive | Start python like interactive console session. 
| | | projectmanager | Launch Project Manager UI | [📑](#projectmanager-arguments) | | settings | Open Settings UI | [📑](#settings-arguments) | | standalonepublisher | Open Standalone Publisher UI | [📑](#standalonepublisher-arguments) | From 188556a7253837838e6ae60c72cd2d73b5bd6da5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 15 Jul 2022 18:34:43 +0200 Subject: [PATCH 380/785] :pencil2: fix typos in arguments --- tools/run_mongo.ps1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index b6b091a9d1..c64ff75969 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -50,7 +50,7 @@ function Exit-WithCode($exitcode) { function Find-Mongo ($preferred_version) { $defaultPath = "C:\Program Files\MongoDB\Server" - Write-Color -Text ">>> ", "Detecting MongoDB ... " -Color Geen, Gray -NoNewline + Write-Color -Text ">>> ", "Detecting MongoDB ... " -Color Green, Gray -NoNewline if (-not (Get-Command "mongod" -ErrorAction SilentlyContinue)) { if(Test-Path "$($defaultPath)\*\bin\mongod.exe" -PathType Leaf) { # we have mongo server installed on standard Windows location @@ -61,7 +61,7 @@ function Find-Mongo ($preferred_version) { Write-Color -Text "OK" -Color Green $use_version = $mongoVersions[-1] foreach ($v in $mongoVersions) { - Write-Color -Text " - found [ ", $v, " ]" - Color Cyan, White, Cyan -NoNewLine + Write-Color -Text " - found [ ", $v, " ]" -Color Cyan, White, Cyan -NoNewLine $version = Split-Path $v -Leaf if ($preferred_version -eq $version) { @@ -110,6 +110,6 @@ $preferred_version = "5.0" $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan -Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]", -Color Green, Gray, Cyan, White, Cyan +Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null From 539d4c8fa99c26d4e7b1b226dd146b60d04b1622 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 15 Jul 2022 18:55:20 +0200 Subject: [PATCH 381/785] modify docstring --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 468f569c7f..cc22a0a835 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -568,7 +568,7 @@ def version_is_latest(project_name, version_id): Args: project_name (str):Name of project where to look for queried entities. - version_id (str|ObjectId): Version id which is checked. + version_id (Union[str, ObjectId]): Version id which is checked. Returns: bool: True if is latest version from subset else False. 
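As a side note on the client-query migration in the patches above: the deprecated `openpype.api.get_latest_version` helper now simply forwards to `openpype.client.get_last_version_by_subset_name`. A minimal sketch of the new call pattern, mirroring the `submit_publish_job.py` change, might look like this (the subset and asset names are placeholders, and an active `legacy_io` session is assumed):

```python
from openpype.client import get_last_version_by_subset_name
from openpype.pipeline import legacy_io

# The active project comes from the legacy_io session, as in the
# patched submit_publish_job.py.
project_name = legacy_io.active_project()

# "renderMain" and "sh010" are placeholder subset/asset names used
# purely for illustration.
last_version = get_last_version_by_subset_name(
    project_name,
    "renderMain",
    asset_name="sh010"
)

# Same follow-up logic as the publish-job plugin: bump to the next
# version number, or start at 1 when nothing was published yet.
if last_version:
    next_version = int(last_version["name"]) + 1
else:
    next_version = 1
```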
From ace2bf4ecb1ac6eb43205090f9c670a9332d3927 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 16 Jul 2022 03:49:48 +0000 Subject: [PATCH 382/785] [Automated] Bump version --- CHANGELOG.md | 54 +++++++++++++++++++++++---------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 30 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc5bf39a29..95427e9ea9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,35 @@ # Changelog +## [3.12.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) + +**🚀 Enhancements** + +- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) +- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) +- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) +- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) +- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) +- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) + +**🐛 Bug fixes** + +- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) +- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) +- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) + +**🔀 Refactored code** + +- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) + ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) ### 📖 Documentation @@ -45,8 +72,6 @@ - LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) - Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) - Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) -- Maya: Handle excluding `model` family from frame range validator. 
[\#3370](https://github.com/pypeclub/OpenPype/pull/3370) -- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) **🔀 Refactored code** @@ -76,7 +101,6 @@ - Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) - Attribute Defs UI: Files widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) -- General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) **🐛 Bug fixes** @@ -87,10 +111,7 @@ - Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) - General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) - Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) -- TVPaint: Make sure exit code is set to not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) - Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) -- Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) -- Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) **🔀 Refactored code** @@ -101,30 +122,11 @@ - Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) - Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) - Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) -- Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) -- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) -- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) -- AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) - -**Merged pull requests:** - -- Sync Queue: Added far future value for null values for dates [\#3371](https://github.com/pypeclub/OpenPype/pull/3371) -- Maya - added support for single frame playblast review [\#3369](https://github.com/pypeclub/OpenPype/pull/3369) ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) -**🚀 Enhancements** - -- Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) - -**🐛 Bug fixes** - -- Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) -- Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) -- Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) - ## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.0-nightly.4...3.11.0) diff --git a/openpype/version.py b/openpype/version.py index c7b0de0381..e9206379e1 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.1" +__version__ = "3.12.2-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 078503a284..19d65b50f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.1" # OpenPype +version = "3.12.2-nightly.1" # OpenPype 
description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From fd8a801f40050995d49e3cd8d885ec2cb6951152 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 10:03:34 +0200 Subject: [PATCH 383/785] fix typo --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index cc22a0a835..81640f75e5 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -580,7 +580,7 @@ def version_is_latest(project_name, version_id): version_doc = get_version_by_id( project_name, version_id, fields=["_id", "type", "parent"] ) - # What to de when version is not found? + # What to do when version is not found? if not version_doc: return False From 971aef4342e8353a83fc2fe9bf2500a48069aa3d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 10:35:56 +0200 Subject: [PATCH 384/785] Removed query list from python file --- openpype/client/entities.py | 604 ------------------------------------ 1 file changed, 604 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 9d65355d1b..38552d9a56 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1348,622 +1348,18 @@ def get_workfile_info( - openpype/hosts/maya/api/shader_definition_editor.py - openpype/hosts/maya/plugins/publish/validate_model_name.py -## Global launch hooks -- openpype/hooks/pre_global_host_data.py - Query: - - project - - asset - -## Global load plugins -- openpype/plugins/load/delete_old_versions.py - Query: - - versions - - representations -- openpype/plugins/load/delivery.py - Query: - - representations - ## Global publish plugins -- openpype/plugins/publish/collect_avalon_entities.py - Query: - - asset - - project -- openpype/plugins/publish/collect_anatomy_instance_data.py - Query: - - assets - - subsets - - last version -- openpype/plugins/publish/collect_scene_loaded_versions.py - Query: - - representations - openpype/plugins/publish/extract_hierarchy_avalon.py - Query: - - asset - - assets - - project Create: - asset Update: - asset -- openpype/plugins/publish/integrate_hero_version.py - Query: - - version - - hero version - - representations -- openpype/plugins/publish/integrate_new.py - Query: - - asset - - subset - - version - - representations -- openpype/plugins/publish/integrate_thumbnail.py - Query: - - version -- openpype/plugins/publish/validate_editorial_asset_name.py - Query: - - assets ## Lib -- openpype/lib/applications.py - Query: - - project - - asset - openpype/lib/avalon_context.py - Query: - - project - - asset - - linked assets (new function get_linked_assets?) 
- - subset - - subsets - - version - - versions - - last version - - representations - - linked representations (new function get_linked_ids_for_representations) Update: - workfile data -- openpype/lib/plugin_tools.py - Query: - - asset - openpype/lib/project_backpack.py - Query: - - project - - everything from mongo Update: - project -- openpype/lib/usdlib.py - Query: - - project - - asset - -## Pipeline -- openpype/pipeline/load/utils.py - Query: - - project - - assets - - subsets - - version - - versions - - representation - - representations -- openpype/pipeline/mongodb.py - Query: - - project -- openpype/pipeline/thumbnail.py - Query: - - project - -## Hosts -### Aftereffects -- openpype/hosts/aftereffects/plugins/create/workfile_creator.py - Query: - - asset - -### Blender -- openpype/hosts/blender/api/pipeline.py - Query: - - asset -- openpype/hosts/blender/plugins/publish/extract_layout.py - Query: - - representation - -### Celaction -- openpype/hosts/celaction/plugins/publish/collect_audio.py - Query: - - subsets - - last versions - - representations - -### Fusion -- openpype/hosts/fusion/api/lib.py - Query: - - asset - - subset - - version - - representation -- openpype/hosts/fusion/plugins/load/load_sequence.py - Query: - - version -- openpype/hosts/fusion/scripts/fusion_switch_shot.py - Query: - - project - - asset - - versions -- openpype/hosts/fusion/utility_scripts/switch_ui.py - Query: - - assets - -### Harmony -- openpype/hosts/harmony/api/pipeline.py - Query: - - representation - -### Hiero -- openpype/hosts/hiero/api/lib.py - Query: - - project - - version - - versions - - representation -- openpype/hosts/hiero/api/tags.py - Query: - - task types - - assets -- openpype/hosts/hiero/plugins/load/load_clip.py - Query: - - version - - versions -- openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py - Query: - - assets - -### Houdini -- openpype/hosts/houdini/api/lib.py - Query: - - asset -- openpype/hosts/houdini/api/usd.py - Query: - - asset -- openpype/hosts/houdini/plugins/create/create_hda.py - Query: - - asset - - subsets -- openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py - Query: - - asset - - subset -- openpype/hosts/houdini/plugins/publish/extract_usd_layered.py - Query: - - asset - - subset - - version - - representation -- openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py - Query: - - asset - - subset -- openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py - Query: - - project - - asset - -### Maya -- openpype/hosts/maya/api/action.py - Query: - - asset -- openpype/hosts/maya/api/commands.py - Query: - - asset - - project -- openpype/hosts/maya/api/lib.py - Query: - - project - - asset - - subset - - subsets - - version - - representation -- openpype/hosts/maya/api/setdress.py - Query: - - version - - representation -- openpype/hosts/maya/plugins/inventory/import_modelrender.py - Query: - - representation -- openpype/hosts/maya/plugins/load/load_audio.py - Query: - - asset - - subset - - version -- openpype/hosts/maya/plugins/load/load_image_plane.py - Query: - - asset - - subset - - version -- openpype/hosts/maya/plugins/load/load_look.py - Query: - - representation -- openpype/hosts/maya/plugins/load/load_vrayproxy.py - Query: - - representation -- openpype/hosts/maya/plugins/load/load_yeti_cache.py - Query: - - representation -- openpype/hosts/maya/plugins/publish/collect_review.py - Query: - - subsets -- openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py - Query: 
- - assets -- openpype/hosts/maya/plugins/publish/validate_node_ids_related.py - Query: - - asset -- openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py - Query: - - asset - - subset - -### Nuke -- openpype/hosts/nuke/api/command.py - Query: - - project - - asset -- openpype/hosts/nuke/api/lib.py - Query: - - project - - asset - - version - - versions - - representation -- openpype/hosts/nuke/plugins/load/load_backdrop.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_camera_abc.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_clip.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_effects_ip.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_effects.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_gizmo_ip.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_gizmo.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_image.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_model.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/load/load_script_precomp.py - Query: - - version - - versions -- openpype/hosts/nuke/plugins/publish/collect_reads.py - Query: - - asset -- openpype/hosts/nuke/plugins/publish/precollect_instances.py - Query: - - asset -- openpype/hosts/nuke/plugins/publish/precollect_writes.py - Query: - - representation -- openpype/hosts/nuke/plugins/publish/validate_script.py - Query: - - asset - - project - -### Photoshop -- openpype/hosts/photoshop/plugins/create/workfile_creator.py - Query: - - asset - -### Resolve -- openpype/hosts/resolve/plugins/load/load_clip.py - Query: - - version - - versions - -### Standalone publisher -- openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py - Query: - - asset -- openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py - Query: - - assets -- openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py - Query: - - project - - asset -- openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py - Query: - - assets - -### TVPaint -- openpype/hosts/tvpaint/api/pipeline.py - Query: - - project - - asset -- openpype/hosts/tvpaint/plugins/load/load_workfile.py - Query: - - project - - asset -- openpype/hosts/tvpaint/plugins/publish/collect_instances.py - Query: - - asset -- openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py - Query: - - asset -- openpype/hosts/tvpaint/plugins/publish/collect_workfile.py - Query: - - asset - -### Unreal -- openpype/hosts/unreal/plugins/load/load_camera.py - Query: - - asset - - assets -- openpype/hosts/unreal/plugins/load/load_layout.py - Query: - - asset - - assets -- openpype/hosts/unreal/plugins/publish/extract_layout.py - Query: - - representation - -### Webpublisher -- openpype/hosts/webpublisher/webserver_service/webpublish_routes.py - Query: - - assets -- openpype/hosts/webpublisher/plugins/publish/collect_published_files.py - Query: - - last versions - -## Tools -openpype/tools/assetlinks/widgets.py -- SimpleLinkView - Query: - - get_versions - - get_subsets - - get_assets - - get_output_link_versions - -openpype/tools/creator/window.py -- CreatorWindow - Query: - - get_asset_by_name - - get_subsets - -openpype/tools/launcher/models.py -- LauncherModel - Query: - - get_project - - get_assets - -openpype/tools/libraryloader/app.py -- LibraryLoaderWindow - Query: - - 
get_project - -openpype/tools/loader/app.py -- LoaderWindow - Query: - - get_project -- show - Query: - - get_projects - -openpype/tools/loader/model.py -- SubsetsModel - Query: - - get_assets - - get_subsets - - get_last_versions - - get_versions - - get_hero_versions - - get_version_by_name -- RepresentationModel - Query: - - get_representations - - sync server specific queries (separated into multiple functions?) - - NOT REPLACED - -openpype/tools/loader/widgets.py -- FamilyModel - Query: - - get_subset_families -- VersionTextEdit - Query: - - get_subset_by_id - - get_version_by_id -- SubsetWidget - Query: - - get_subsets - - get_representations - Update: - - Subset groups (combination of asset id and subset names) -- RepresentationWidget - Query: - - get_subsets - - get_versions - - get_representations -- ThumbnailWidget - Query: - - get_thumbnail_id_from_source - - get_thumbnail - -openpype/tools/mayalookassigner/app.py -- MayaLookAssignerWindow - Query: - - get_last_version_by_subset_id - -openpype/tools/mayalookassigner/commands.py -- create_items_from_nodes - Query: - - get_asset_by_id - -openpype/tools/mayalookassigner/vray_proxies.py -- get_look_relationships - Query: - - get_representation_by_name -- load_look - Query: - - get_representation_by_name -- vrayproxy_assign_look - Query: - - get_last_version_by_subset_name - -openpype/tools/project_manager/project_manager/model.py -- HierarchyModel - Query: - - get_asset_ids_with_subsets - - get_project - - get_assets - -openpype/tools/project_manager/project_manager/view.py -- ProjectDocCache - Query: - - get_project - -openpype/tools/project_manager/project_manager/widgets.py -- CreateProjectDialog - Query: - - get_projects - -openpype/tools/publisher/widgets/create_dialog.py -- CreateDialog - Query: - - get_asset_by_name - - get_subsets - -openpype/tools/publisher/control.py -- AssetDocsCache - Query: - - get_assets - -openpype/tools/sceneinventory/model.py -- InventoryModel - Query: - - get_asset_by_id - - get_subset_by_id - - get_version_by_id - - get_last_version_by_subset_id - - get_representation - -openpype/tools/sceneinventory/switch_dialog.py -- SwitchAssetDialog - Query: - - get_asset_by_name - - get_assets - - get_subset_by_name - - get_subsets - - get_versions - - get_hero_versions - - get_last_versions - - get_representations - -openpype/tools/sceneinventory/view.py -- SceneInventoryView - Query: - - get_version_by_id - - get_versions - - get_hero_versions - - get_representation_by_id - - get_representations - -openpype/tools/standalonepublish/widgets/model_asset.py -- AssetModel - Query: - - get_assets - -openpype/tools/standalonepublish/widgets/widget_asset.py -- AssetWidget - Query: - - get_project - - get_asset_by_id - -openpype/tools/standalonepublish/widgets/widget_family.py -- FamilyWidget - Query: - - get_asset_by_name - - get_subset_by_name - - get_subsets - - get_last_version_by_subset_id - -openpype/tools/standalonepublish/app.py -- Window - Query: - - get_asset_by_id - -openpype/tools/texture_copy/app.py -- TextureCopy - Query: - - get_project - - get_asset_by_name - -openpype/tools/workfiles/files_widget.py -- FilesWidget - Query: - - get_asset_by_id - -openpype/tools/workfiles/model.py -- PublishFilesModel - Query: - - get_subsets - - get_versions - - get_representations - -openpype/tools/workfiles/save_as_dialog.py -- build_workfile_data - Query: - - get_project - - get_asset_by_name - -openpype/tools/workfiles/window.py -- Window - Query: - - get_asset_by_id - - get_asset_by_name - 
-openpype/tools/utils/assets_widget.py -- AssetModel - Query: - - get_project - - get_assets - -openpype/tools/utils/delegates.py -- VersionDelegate - Query: - - get_versions - - get_hero_versions - -openpype/tools/utils/lib.py -- GroupsConfig - Query: - - get_project -- FamilyConfigCache - Query: - - get_asset_by_name - -openpype/tools/utils/tasks_widget.py -- TasksModel - Query: - - get_project - - get_asset_by_id """ From 44611981d40b02065c0a03509b7d45c25900fe66 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 10:37:16 +0200 Subject: [PATCH 385/785] modified return types in docstrings --- openpype/client/entities.py | 176 +++++++++++++++++++----------------- 1 file changed, 91 insertions(+), 85 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 38552d9a56..ebd9b4821d 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -117,8 +117,8 @@ def get_asset_by_id(project_name, asset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - asset_id (str|ObjectId): Asset's id. - fields (list[str]): Fields that should be returned. All fields are + asset_id (Union[str, ObjectId]): Asset's id. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -141,7 +141,7 @@ def get_asset_by_name(project_name, asset_name, fields=None): Args: project_name (str): Name of project where to look for queried entities. asset_name (str): Asset's name. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -178,12 +178,13 @@ def _get_assets( Args: project_name (str): Name of project where to look for queried entities. - asset_ids (list[str|ObjectId]): Asset ids that should be found. - asset_names (list[str]): Name assets that should be found. - parent_ids (list[str|ObjectId]): Parent asset ids. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. standard (bool): Query standart assets (type 'asset'). archived (bool): Query archived assets (type 'archived_asset'). - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -244,11 +245,12 @@ def get_assets( Args: project_name (str): Name of project where to look for queried entities. - asset_ids (list[str|ObjectId]): Asset ids that should be found. - asset_names (list[str]): Name assets that should be found. - parent_ids (list[str|ObjectId]): Parent asset ids. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. archived (bool): Add also archived assets. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -283,10 +285,11 @@ def get_archived_assets( Args: project_name (str): Name of project where to look for queried entities. - asset_ids (list[str|ObjectId]): Asset ids that should be found. - asset_names (list[str]): Name assets that should be found. - parent_ids (list[str|ObjectId]): Parent asset ids. 
- fields (list[str]): Fields that should be returned. All fields are + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should + be found. + asset_names (Iterable[str]): Name assets that should be found. + parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -304,10 +307,11 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): Args: project_name (str): Name of project where to look for queried entities. - asset_ids (list[str|ObjectId]): Look only for entered asset ids. + asset_ids (Iterable[Union[str, ObjectId]]): Look only for entered + asset ids. Returns: - List[ObjectId]: Asset ids that have existing subsets. + Iterable[ObjectId]: Asset ids that have existing subsets. """ subset_query = { @@ -345,8 +349,8 @@ def get_subset_by_id(project_name, subset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - subset_id (str|ObjectId): Id of subset which should be found. - fields (list[str]): Fields that should be returned. All fields are + subset_id (Union[str, ObjectId]): Id of subset which should be found. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -369,8 +373,8 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. subset_name (str): Name of subset. - asset_id (str|ObjectId): Id of parent asset. - fields (list[str]): Fields that should be returned. All fields are + asset_id (Union[str, ObjectId]): Id of parent asset. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -409,16 +413,16 @@ def get_subsets( Args: project_name (str): Name of project where to look for queried entities. - subset_ids (list[str|ObjectId]): Subset ids that should be queried. + subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should be + queried. Filter ignored if 'None' is passed. + subset_names (Iterable[str]): Subset names that should be queried. Filter ignored if 'None' is passed. - subset_names (list[str]): Subset names that should be queried. - Filter ignored if 'None' is passed. - asset_ids (list[str|ObjectId]): Asset ids under which should look for - the subsets. Filter ignored if 'None' is passed. - names_by_asset_ids (dict[ObjectId, list[str]]): Complex filtering + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which should + look for the subsets. Filter ignored if 'None' is passed. + names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering using asset ids and list of subset names under the asset. archived (bool): Look for archived subsets too. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -472,8 +476,8 @@ def get_subset_families(project_name, subset_ids=None): Args: project_name (str): Name of project where to look for queried entities. - subset_ids (list[str|ObjectId]): Subset ids that should be queried. - All subsets from project are used if 'None' is passed. + subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should + be queried. All subsets from project are used if 'None' is passed. Returns: set[str]: Main families of matching subsets. 
@@ -508,8 +512,8 @@ def get_version_by_id(project_name, version_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - version_id (str|ObjectId): Id of version which should be found. - fields (list[str]): Fields that should be returned. All fields are + version_id (Union[str, ObjectId]): Id of version which should be found. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -535,8 +539,8 @@ def get_version_by_name(project_name, version, subset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. version (int): name of version entity (it's version). - subset_id (str|ObjectId): Id of version which should be found. - fields (list[str]): Fields that should be returned. All fields are + subset_id (Union[str, ObjectId]): Id of version which should be found. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -621,14 +625,14 @@ def get_versions( Args: project_name (str): Name of project where to look for queried entities. - version_ids (list[str|ObjectId]): Version ids that will be queried. + version_ids (Iterable[Union[str, ObjectId]]): Version ids that will + be queried. Filter ignored if 'None' is passed. + subset_ids (Iterable[str]): Subset ids that will be queried. Filter ignored if 'None' is passed. - subset_ids (list[str]): Subset ids that will be queried. - Filter ignored if 'None' is passed. - versions (list[int]): Version names (as integers). + versions (Iterable[int]): Version names (as integers). Filter ignored if 'None' is passed. hero (bool): Look also for hero versions. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -651,8 +655,9 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - subset_id (str|ObjectId): Subset id under which is hero version. - fields (list[str]): Fields that should be returned. All fields are + subset_id (Union[str, ObjectId]): Subset id under which + is hero version. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -681,8 +686,8 @@ def get_hero_version_by_id(project_name, version_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - version_id (str|ObjectId): Hero version id. - fields (list[str]): Fields that should be returned. All fields are + version_id (Union[str, ObjectId]): Hero version id. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -716,11 +721,11 @@ def get_hero_versions( Args: project_name (str): Name of project where to look for queried entities. - subset_ids (list[str|ObjectId]): Subset ids for which should look for - hero versions. Filter ignored if 'None' is passed. - version_ids (list[str|ObjectId]): Hero version ids. Filter ignored if - 'None' is passed. - fields (list[str]): Fields that should be returned. All fields are + subset_ids (Iterable[Union[str, ObjectId]]): Subset ids for which + should look for hero versions. Filter ignored if 'None' is passed. + version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter + ignored if 'None' is passed. + fields (Iterable[str]): Fields that should be returned. 
All fields are returned if 'None' is passed. Returns: @@ -746,13 +751,13 @@ def get_output_link_versions(project_name, version_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - version_id (str|ObjectId): Version id which can be used as input link - for other versions. - fields (list[str]): Fields that should be returned. All fields are + version_id (Union[str, ObjectId]): Version id which can be used + as input link for other versions. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: - Cursor|list: Iterable cursor yielding versions that are used as input + Iterable: Iterable cursor yielding versions that are used as input links for passed version. """ @@ -774,8 +779,8 @@ def get_last_versions(project_name, subset_ids, fields=None): Args: project_name (str): Name of project where to look for queried entities. - subset_ids (list): List of subset ids. - fields (list[str]): Fields that should be returned. All fields are + subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -859,8 +864,8 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - subset_id (str|ObjectId): Id of version which should be found. - fields (list[str]): Fields that should be returned. All fields are + subset_id (Union[str, ObjectId]): Id of version which should be found. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -889,10 +894,10 @@ def get_last_version_by_subset_name( Args: project_name (str): Name of project where to look for queried entities. subset_name (str): Name of subset. - asset_id (str|ObjectId): Asset id which is parent of passed + asset_id (Union[str, ObjectId]): Asset id which is parent of passed subset name. asset_name (str): Asset name which is parent of passed subset name. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -923,8 +928,8 @@ def get_representation_by_id(project_name, representation_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - representation_id (str|ObjectId): Representation id. - fields (list[str]): Fields that should be returned. All fields are + representation_id (Union[str, ObjectId]): Representation id. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -956,8 +961,8 @@ def get_representation_by_name( Args: project_name (str): Name of project where to look for queried entities. representation_name (str): Representation name. - version_id (str|ObjectId): Id of parent version entity. - fields (list[str]): Fields that should be returned. All fields are + version_id (Union[str, ObjectId]): Id of parent version entity. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -1061,18 +1066,18 @@ def get_representations( Args: project_name (str): Name of project where to look for queried entities. - representation_ids (list[str|ObjectId]): Representation ids used as - filter. Filter ignored if 'None' is passed. - representation_names (list[str]): Representations names used as filter. 
- Filter ignored if 'None' is passed. - version_ids (list[str]): Subset ids used as parent filter. Filter + representation_ids (Iterable[Union[str, ObjectId]]): Representation ids + used as filter. Filter ignored if 'None' is passed. + representation_names (Iterable[str]): Representations names used + as filter. Filter ignored if 'None' is passed. + version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (list[str]): Filter by extension of main representation + extensions (Iterable[str]): Filter by extension of main representation file (without dot). names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. archived (bool): Output will also contain archived representations. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -1107,17 +1112,17 @@ def get_archived_representations( Args: project_name (str): Name of project where to look for queried entities. - representation_ids (list[str|ObjectId]): Representation ids used as - filter. Filter ignored if 'None' is passed. - representation_names (list[str]): Representations names used as filter. - Filter ignored if 'None' is passed. - version_ids (list[str]): Subset ids used as parent filter. Filter + representation_ids (Iterable[Union[str, ObjectId]]): Representation ids + used as filter. Filter ignored if 'None' is passed. + representation_names (Iterable[str]): Representations names used + as filter. Filter ignored if 'None' is passed. + version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (list[str]): Filter by extension of main representation + extensions (Iterable[str]): Filter by extension of main representation file (without dot). - names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering + names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -1145,7 +1150,7 @@ def get_representations_parents(project_name, representations): Args: project_name (str): Name of project where to look for queried entities. - representations (list[dict]): Representation entities with at least + representations (List[dict]): Representation entities with at least '_id' and 'parent' keys. Returns: @@ -1238,7 +1243,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id): Args: project_name (str): Name of project where to look for queried entities. src_type (str): Type of source entity ('asset', 'version'). - src_id (str|objectId): Id of source entity. + src_id (Union[str, ObjectId]): Id of source entity. Returns: ObjectId: Thumbnail id assigned to entity. @@ -1265,8 +1270,9 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None): Args: project_name (str): Name of project where to look for queried entities. - thumbnail_ids (list[str|ObjectId]): Ids of thumbnail entities. - fields (list[str]): Fields that should be returned. All fields are + thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail + entities. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. 
Returns: @@ -1291,8 +1297,8 @@ def get_thumbnail(project_name, thumbnail_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. - thumbnail_id (str|ObjectId): Id of thumbnail entity. - fields (list[str]): Fields that should be returned. All fields are + thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity. + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. Returns: @@ -1319,9 +1325,9 @@ def get_workfile_info( Args: project_name (str): Name of project where to look for queried entities. - asset_id (str|ObjectId): Id of asset entity. + asset_id (Union[str, ObjectId]): Id of asset entity. task_name (str): Task name on asset. - fields (list[str]): Fields that should be returned. All fields are + fields (Iterable[str]): Fields that should be returned. All fields are returned if 'None' is passed. """ From 340c20e64ca1e9239b9dc1d067fbdefac053be5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 10:43:20 +0200 Subject: [PATCH 386/785] fix line length --- openpype/client/entities.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index ebd9b4821d..e7eeadcf48 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -417,8 +417,8 @@ def get_subsets( queried. Filter ignored if 'None' is passed. subset_names (Iterable[str]): Subset names that should be queried. Filter ignored if 'None' is passed. - asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which should - look for the subsets. Filter ignored if 'None' is passed. + asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which + should look for the subsets. Filter ignored if 'None' is passed. names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering using asset ids and list of subset names under the asset. archived (bool): Look for archived subsets too. 
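The same conventions carry through the version, representation and thumbnail queries touched above: ids may be str or ObjectId, and 'fields' trims the documents that come back. A short sketch under the same assumptions (project name and id are again placeholders):

    from openpype.client.entities import (
        get_last_version_by_subset_id,
        get_representations,
    )

    project_name = "demo_project"           # placeholder project name
    subset_id = "507f1f77bcf86cd799439011"  # placeholder id, str or ObjectId

    # Last version of the subset, limited to the fields that are needed.
    last_version = get_last_version_by_subset_id(
        project_name, subset_id, fields=["_id", "name"]
    )

    if last_version:
        # Representations under that version; 'extensions' filters by the
        # extension of the main representation file (without the dot).
        for repre_doc in get_representations(
            project_name,
            version_ids=[last_version["_id"]],
            extensions=["exr", "mov"],
        ):
            print(repre_doc["name"])
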
From eed26c09fc0385a907703622ddfb6f29339b6860 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 10:53:48 +0200 Subject: [PATCH 387/785] OP-3589 - renamed method --- openpype/plugins/publish/extract_thumbnail.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index e6df5b3ee0..7933595b89 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -46,7 +46,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): self.log.info("Skipping - no review set on instance.") return - if self._has_thumbnail_already(instance): + if self._already_has_thumbnail(instance): self.log.info("Thumbnail representation already present.") return @@ -106,7 +106,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # There is no need to create more then one thumbnail break - def _has_thumbnail_already(self, instance): + def _already_has_thumbnail(self, instance): for repre in instance.data.get("representations", []): self.log.info("repre {}".format(repre)) if repre["name"] == "thumbnail": From 247db779fe416c663d0b8e4ae100284fe9977476 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 10:56:08 +0200 Subject: [PATCH 388/785] Update openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../traypublisher/plugins/publish/validate_frame_ranges.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 89289fc6d4..bb6b906e8d 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -58,9 +58,10 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, files = [files] frames = len(files) - msg = "Frame duration from DB:'{}' ". format(int(duration)) +\ - " doesn't match number of files:'{}'".format(frames) +\ - " Please change frame range for Asset or limit no. of files" + msg = ( + "Frame duration from DB:'{}' doesn't match number of files:'{}'" + " Please change frame range for Asset or limit no. of files" + ). 
format(int(duration), frames) formatting_data = {"duration": duration, "found": frames} From cdabfbe6f9d0ea90df1a44525bf0ae88a45c825d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 10:56:25 +0200 Subject: [PATCH 389/785] Update openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../traypublisher/plugins/publish/validate_frame_ranges.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index bb6b906e8d..0d7081139d 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -35,7 +35,8 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, for pattern in self.skip_timelines_check): self.log.info("Skipping for {} task".format(instance.data["task"])) - asset_data = lib.get_asset(instance.data["asset"])["data"] + asset_doc = instance.data["assetEntity"] + asset_data = asset_doc["data"] frame_start = asset_data["frameStart"] frame_end = asset_data["frameEnd"] handle_start = asset_data["handleStart"] From ef2284e507cd43db6b14518244333d10db1091b6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 10:57:01 +0200 Subject: [PATCH 390/785] Update openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/validate_frame_ranges.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 0d7081139d..6d48e8352c 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -43,18 +43,19 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, handle_end = asset_data["handleEnd"] duration = (frame_end - frame_start + 1) + handle_start + handle_end - repre = instance.data.get("representations", [None]) - if not repre: + repres = instance.data.get("representations") + if not repres: self.log.info("No representations, skipping.") return - - ext = repre[0]['ext'].replace(".", '') + + first_repre = repres[0] + ext = first_repre['ext'].replace(".", '') if not ext or ext.lower() not in self.check_extensions: self.log.warning("Cannot check for extension {}".format(ext)) return - files = instance.data.get("representations", [None])[0]["files"] + files = first_repre["files"] if isinstance(files, str): files = [files] frames = len(files) From 247eaf792bb21fd294bf95bb64423965834a4b00 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 11:01:30 +0200 Subject: [PATCH 391/785] Check only if skip_timelines_check is filled --- .../traypublisher/plugins/publish/validate_frame_ranges.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 6d48e8352c..65b6128cbe 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -31,8 +31,9 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, if not 
self.is_active(instance.data): return - if any(re.search(pattern, instance.data["task"]) - for pattern in self.skip_timelines_check): + if (self.skip_timelines_check and + any(re.search(pattern, instance.data["task"]) + for pattern in self.skip_timelines_check)): self.log.info("Skipping for {} task".format(instance.data["task"])) asset_doc = instance.data["assetEntity"] From e21338424ed92c4b59422f487de6de501d756c3c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 11:02:50 +0200 Subject: [PATCH 392/785] Hound --- .../traypublisher/plugins/publish/validate_frame_ranges.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 65b6128cbe..947624100a 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -3,7 +3,6 @@ import re import pyblish.api import openpype.api -from openpype import lib from openpype.pipeline import ( PublishXmlValidationError, OptionalPyblishPluginMixin @@ -48,7 +47,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, if not repres: self.log.info("No representations, skipping.") return - + first_repre = repres[0] ext = first_repre['ext'].replace(".", '') From 4f6646d6c7b712e8ad6678a410da41772642e868 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Jul 2022 11:11:08 +0200 Subject: [PATCH 393/785] Update openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/traypublisher/plugins/publish/collect_mov_batch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py index 99065d2408..d24659aa8b 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -19,7 +19,7 @@ class CollectMovBatch( hosts = ["traypublisher"] def process(self, instance): - if not instance.data.get("creator_identifier") == "render_mov_batch": + if instance.data.get("creator_identifier") != "render_mov_batch": return creator_attributes = instance.data["creator_attributes"] From 846e23dbabbbc9fd64f2620cfa139ea87ca1fcd8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:16:53 +0200 Subject: [PATCH 394/785] copied mongo.py from lib to client --- openpype/client/mongo.py | 210 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 210 insertions(+) create mode 100644 openpype/client/mongo.py diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py new file mode 100644 index 0000000000..a747250107 --- /dev/null +++ b/openpype/client/mongo.py @@ -0,0 +1,210 @@ +import os +import sys +import time +import logging +import pymongo +import certifi + +if sys.version_info[0] == 2: + from urlparse import urlparse, parse_qs +else: + from urllib.parse import urlparse, parse_qs + + +class MongoEnvNotSet(Exception): + pass + + +def _decompose_url(url): + """Decompose mongo url to basic components. + + Used for creation of MongoHandler which expect mongo url components as + separated kwargs. Components are at the end not used as we're setting + connection directly this is just a dumb components for MongoHandler + validation pass. 
+ """ + + # Use first url from passed url + # - this is because it is possible to pass multiple urls for multiple + # replica sets which would crash on urlparse otherwise + # - please don't use comma in username of password + url = url.split(",")[0] + components = { + "scheme": None, + "host": None, + "port": None, + "username": None, + "password": None, + "auth_db": None + } + + result = urlparse(url) + if result.scheme is None: + _url = "mongodb://{}".format(url) + result = urlparse(_url) + + components["scheme"] = result.scheme + components["host"] = result.hostname + try: + components["port"] = result.port + except ValueError: + raise RuntimeError("invalid port specified") + components["username"] = result.username + components["password"] = result.password + + try: + components["auth_db"] = parse_qs(result.query)['authSource'][0] + except KeyError: + # no auth db provided, mongo will use the one we are connecting to + pass + + return components + + +def get_default_components(): + mongo_url = os.environ.get("OPENPYPE_MONGO") + if mongo_url is None: + raise MongoEnvNotSet( + "URL for Mongo logging connection is not set." + ) + return _decompose_url(mongo_url) + + +def should_add_certificate_path_to_mongo_url(mongo_url): + """Check if should add ca certificate to mongo url. + + Since 30.9.2021 cloud mongo requires newer certificates that are not + available on most of workstation. This adds path to certifi certificate + which is valid for it. To add the certificate path url must have scheme + 'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query. + """ + + parsed = urlparse(mongo_url) + query = parse_qs(parsed.query) + lowered_query_keys = set(key.lower() for key in query.keys()) + add_certificate = False + # Check if url 'ssl' or 'tls' are set to 'true' + for key in ("ssl", "tls"): + if key in query and "true" in query["ssl"]: + add_certificate = True + break + + # Check if url contains 'mongodb+srv' + if not add_certificate and parsed.scheme == "mongodb+srv": + add_certificate = True + + # Check if url does already contain certificate path + if add_certificate and "tlscafile" in lowered_query_keys: + add_certificate = False + + return add_certificate + + +def validate_mongo_connection(mongo_uri): + """Check if provided mongodb URL is valid. + + Args: + mongo_uri (str): URL to validate. + + Raises: + ValueError: When port in mongo uri is not valid. + pymongo.errors.InvalidURI: If passed mongo is invalid. + pymongo.errors.ServerSelectionTimeoutError: If connection timeout + passed so probably couldn't connect to mongo server. + + """ + + client = OpenPypeMongoConnection.create_connection( + mongo_uri, retry_attempts=1 + ) + client.close() + + +class OpenPypeMongoConnection: + """Singleton MongoDB connection. + + Keeps MongoDB connections by url. 
+ """ + + mongo_clients = {} + log = logging.getLogger("OpenPypeMongoConnection") + + @staticmethod + def get_default_mongo_url(): + return os.environ["OPENPYPE_MONGO"] + + @classmethod + def get_mongo_client(cls, mongo_url=None): + if mongo_url is None: + mongo_url = cls.get_default_mongo_url() + + connection = cls.mongo_clients.get(mongo_url) + if connection: + # Naive validation of existing connection + try: + connection.server_info() + with connection.start_session(): + pass + except Exception: + connection = None + + if not connection: + cls.log.debug("Creating mongo connection to {}".format(mongo_url)) + connection = cls.create_connection(mongo_url) + cls.mongo_clients[mongo_url] = connection + + return connection + + @classmethod + def create_connection(cls, mongo_url, timeout=None, retry_attempts=None): + parsed = urlparse(mongo_url) + # Force validation of scheme + if parsed.scheme not in ["mongodb", "mongodb+srv"]: + raise pymongo.errors.InvalidURI(( + "Invalid URI scheme:" + " URI must begin with 'mongodb://' or 'mongodb+srv://'" + )) + + if timeout is None: + timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000) + + kwargs = { + "serverSelectionTimeoutMS": timeout + } + if should_add_certificate_path_to_mongo_url(mongo_url): + kwargs["ssl_ca_certs"] = certifi.where() + + mongo_client = pymongo.MongoClient(mongo_url, **kwargs) + + if retry_attempts is None: + retry_attempts = 3 + + elif not retry_attempts: + retry_attempts = 1 + + last_exc = None + valid = False + t1 = time.time() + for attempt in range(1, retry_attempts + 1): + try: + mongo_client.server_info() + with mongo_client.start_session(): + pass + valid = True + break + + except Exception as exc: + last_exc = exc + if attempt < retry_attempts: + cls.log.warning( + "Attempt {} failed. Retrying... 
".format(attempt) + ) + time.sleep(1) + + if not valid: + raise last_exc + + cls.log.info("Connected to {}, delay {:.3f}s".format( + mongo_url, time.time() - t1 + )) + return mongo_client From ccbc18fd82d7a00c2ab187489ca50977da5e9b25 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:17:01 +0200 Subject: [PATCH 395/785] 'OpenPypeMongoConnection' is available in 'openpype.client' --- openpype/client/__init__.py | 6 ++++++ openpype/client/entities.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 97e6755d09..0bd79de140 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -1,3 +1,7 @@ +from .mongo import ( + OpenPypeMongoConnection, +) + from .entities import ( get_projects, get_project, @@ -40,6 +44,8 @@ from .entities import ( ) __all__ = ( + "OpenPypeMongoConnection", + "get_projects", "get_project", "get_whole_project", diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 9d65355d1b..1c32632915 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -12,7 +12,7 @@ import collections import six from bson.objectid import ObjectId -from openpype.lib.mongo import OpenPypeMongoConnection +from .mongo import OpenPypeMongoConnection def _get_project_database(): From 308d9e9c498642ebc0a8dfa3e2e86603a4c01ad5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:20:14 +0200 Subject: [PATCH 396/785] use 'OpenPypeMongoConnection' from 'openpype.client' --- openpype/hosts/maya/api/shader_definition_editor.py | 2 +- .../hosts/maya/plugins/publish/validate_model_name.py | 2 +- .../webpublisher/webserver_service/webpublish_routes.py | 2 +- .../hosts/webpublisher/webserver_service/webserver_cli.py | 3 +-- openpype/lib/local_settings.py | 2 +- openpype/lib/log.py | 7 ++++--- openpype/lib/remote_publish.py | 2 +- openpype/modules/ftrack/ftrack_server/event_server_cli.py | 8 ++++---- openpype/modules/ftrack/ftrack_server/lib.py | 2 +- openpype/modules/ftrack/scripts/sub_event_storer.py | 3 ++- .../modules/slack/plugins/publish/integrate_slack_api.py | 2 +- openpype/pipeline/mongodb.py | 4 ++-- openpype/settings/handlers.py | 2 +- 13 files changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/maya/api/shader_definition_editor.py b/openpype/hosts/maya/api/shader_definition_editor.py index 911db48ac2..6ea5e1a127 100644 --- a/openpype/hosts/maya/api/shader_definition_editor.py +++ b/openpype/hosts/maya/api/shader_definition_editor.py @@ -6,7 +6,7 @@ Shader names are stored as simple text file over GridFS in mongodb. 
""" import os from Qt import QtWidgets, QtCore, QtGui -from openpype.lib.mongo import OpenPypeMongoConnection +from openpype.client.mongo import OpenPypeMongoConnection from openpype import resources import gridfs diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 50acf2b8b7..02107d5732 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -10,7 +10,7 @@ from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) -from openpype.lib.mongo import OpenPypeMongoConnection +from openpype.client.mongo import OpenPypeMongoConnection import gridfs diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 4cb3cee8e1..6444a5191d 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -10,9 +10,9 @@ from aiohttp.web_response import Response from openpype.client import ( get_projects, get_assets, + OpenPypeMongoConnection, ) from openpype.lib import ( - OpenPypeMongoConnection, PypeLogger, ) from openpype.lib.remote_publish import ( diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 1ed8f22b2c..6620e5d5cf 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -6,6 +6,7 @@ import requests import json import subprocess +from openpype.client import OpenPypeMongoConnection from openpype.lib import PypeLogger from .webpublish_routes import ( @@ -121,8 +122,6 @@ def run_webserver(*args, **kwargs): def reprocess_failed(upload_dir, webserver_url): # log.info("check_reprocesable_records") - from openpype.lib import OpenPypeMongoConnection - mongo_client = OpenPypeMongoConnection.get_mongo_client() database_name = os.environ["OPENPYPE_DATABASE_NAME"] dbcon = mongo_client[database_name]["webpublishes"] diff --git a/openpype/lib/local_settings.py b/openpype/lib/local_settings.py index 97e99b4b5a..c6c9699240 100644 --- a/openpype/lib/local_settings.py +++ b/openpype/lib/local_settings.py @@ -34,7 +34,7 @@ from openpype.settings import ( get_system_settings ) -from .import validate_mongo_connection +from openpype.client.mongo import validate_mongo_connection _PLACEHOLDER = object() diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 2cdb7ec8e4..e0fc7b33b1 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -24,12 +24,13 @@ import traceback import threading import copy -from . import Terminal -from .mongo import ( +from openpype.client.mongo import ( MongoEnvNotSet, get_default_components, - OpenPypeMongoConnection + OpenPypeMongoConnection, ) +from . 
import Terminal + try: import log4mongo from log4mongo.handlers import MongoHandler diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index d7884d0200..38c6b07c5b 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -7,7 +7,7 @@ from bson.objectid import ObjectId import pyblish.util import pyblish.api -from openpype.lib.mongo import OpenPypeMongoConnection +from openpype.client.mongo import OpenPypeMongoConnection from openpype.lib.plugin_tools import parse_json ERROR_STATUS = "error" diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 90ce757242..3ef7c8270a 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -1,11 +1,9 @@ import os -import sys import signal import datetime import subprocess import socket import json -import platform import getpass import atexit import time @@ -13,12 +11,14 @@ import uuid import ftrack_api import pymongo +from openpype.client.mongo import ( + OpenPypeMongoConnection, + validate_mongo_connection, +) from openpype.lib import ( get_openpype_execute_args, - OpenPypeMongoConnection, get_openpype_version, get_build_version, - validate_mongo_connection ) from openpype_modules.ftrack import FTRACK_MODULE_DIR from openpype_modules.ftrack.lib import credentials diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 5c6d6352d2..3da1e7c7f0 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -24,7 +24,7 @@ except ImportError: from ftrack_api._weakref import WeakMethod from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info -from openpype.lib import OpenPypeMongoConnection +from openpype.client import OpenPypeMongoConnection from openpype.api import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 946ecbff79..204cce89e8 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -6,6 +6,8 @@ import socket import pymongo import ftrack_api + +from openpype.client import OpenPypeMongoConnection from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -15,7 +17,6 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import ( - OpenPypeMongoConnection, get_openpype_version, get_build_version ) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 10bde7d4c0..c3b288f0cd 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -4,8 +4,8 @@ import pyblish.api import copy from datetime import datetime +from openpype.client import OpenPypeMongoConnection from openpype.lib.plugin_tools import prepare_template_data -from openpype.lib import OpenPypeMongoConnection class IntegrateSlackAPI(pyblish.api.InstancePlugin): diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index dab5bb9e13..be2b67a5e7 100644 --- a/openpype/pipeline/mongodb.py +++ 
b/openpype/pipeline/mongodb.py @@ -5,6 +5,8 @@ import logging import pymongo from uuid import uuid4 +from openpype.client import OpenPypeMongoConnection + from . import schema @@ -156,8 +158,6 @@ class AvalonMongoDB: @property def mongo_client(self): - from openpype.lib import OpenPypeMongoConnection - return OpenPypeMongoConnection.get_mongo_client() @property diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index c99fc6080b..2bcc2e06dd 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -7,6 +7,7 @@ from abc import ABCMeta, abstractmethod import six import openpype.version +from openpype.client.mongo import OpenPypeMongoConnection from .constants import ( GLOBAL_SETTINGS_KEY, @@ -337,7 +338,6 @@ class MongoSettingsHandler(SettingsHandler): def __init__(self): # Get mongo connection - from openpype.lib import OpenPypeMongoConnection from openpype.pipeline import AvalonMongoDB settings_collection = OpenPypeMongoConnection.get_mongo_client() From dc6b02c234c9862dbcf60139e7f5463b919d3e20 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:20:49 +0200 Subject: [PATCH 397/785] mongo.py in openpype.lib is reusing functionality from openpype.client for backwards compatibility --- openpype/lib/mongo.py | 211 ++++-------------------------------------- 1 file changed, 18 insertions(+), 193 deletions(-) diff --git a/openpype/lib/mongo.py b/openpype/lib/mongo.py index c08e76c75c..80487f317d 100644 --- a/openpype/lib/mongo.py +++ b/openpype/lib/mongo.py @@ -1,206 +1,31 @@ -import os -import sys -import time -import logging -import pymongo -import certifi - -if sys.version_info[0] == 2: - from urlparse import urlparse, parse_qs -else: - from urllib.parse import urlparse, parse_qs - - -class MongoEnvNotSet(Exception): - pass - - -def _decompose_url(url): - """Decompose mongo url to basic components. - - Used for creation of MongoHandler which expect mongo url components as - separated kwargs. Components are at the end not used as we're setting - connection directly this is just a dumb components for MongoHandler - validation pass. - """ - # Use first url from passed url - # - this is because it is possible to pass multiple urls for multiple - # replica sets which would crash on urlparse otherwise - # - please don't use comma in username of password - url = url.split(",")[0] - components = { - "scheme": None, - "host": None, - "port": None, - "username": None, - "password": None, - "auth_db": None - } - - result = urlparse(url) - if result.scheme is None: - _url = "mongodb://{}".format(url) - result = urlparse(_url) - - components["scheme"] = result.scheme - components["host"] = result.hostname - try: - components["port"] = result.port - except ValueError: - raise RuntimeError("invalid port specified") - components["username"] = result.username - components["password"] = result.password - - try: - components["auth_db"] = parse_qs(result.query)['authSource'][0] - except KeyError: - # no auth db provided, mongo will use the one we are connecting to - pass - - return components +from openpype.client.mongo import ( + MongoEnvNotSet, + OpenPypeMongoConnection, +) def get_default_components(): - mongo_url = os.environ.get("OPENPYPE_MONGO") - if mongo_url is None: - raise MongoEnvNotSet( - "URL for Mongo logging connection is not set." 
- ) - return _decompose_url(mongo_url) + from openpype.client.mongo import get_default_components + + return get_default_components() def should_add_certificate_path_to_mongo_url(mongo_url): - """Check if should add ca certificate to mongo url. + from openpype.client.mongo import should_add_certificate_path_to_mongo_url - Since 30.9.2021 cloud mongo requires newer certificates that are not - available on most of workstation. This adds path to certifi certificate - which is valid for it. To add the certificate path url must have scheme - 'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query. - """ - parsed = urlparse(mongo_url) - query = parse_qs(parsed.query) - lowered_query_keys = set(key.lower() for key in query.keys()) - add_certificate = False - # Check if url 'ssl' or 'tls' are set to 'true' - for key in ("ssl", "tls"): - if key in query and "true" in query["ssl"]: - add_certificate = True - break - - # Check if url contains 'mongodb+srv' - if not add_certificate and parsed.scheme == "mongodb+srv": - add_certificate = True - - # Check if url does already contain certificate path - if add_certificate and "tlscafile" in lowered_query_keys: - add_certificate = False - - return add_certificate + return should_add_certificate_path_to_mongo_url(mongo_url) def validate_mongo_connection(mongo_uri): - """Check if provided mongodb URL is valid. + from openpype.client.mongo import validate_mongo_connection - Args: - mongo_uri (str): URL to validate. - - Raises: - ValueError: When port in mongo uri is not valid. - pymongo.errors.InvalidURI: If passed mongo is invalid. - pymongo.errors.ServerSelectionTimeoutError: If connection timeout - passed so probably couldn't connect to mongo server. - - """ - client = OpenPypeMongoConnection.create_connection( - mongo_uri, retry_attempts=1 - ) - client.close() + return validate_mongo_connection(mongo_uri) -class OpenPypeMongoConnection: - """Singleton MongoDB connection. - - Keeps MongoDB connections by url. 
- """ - mongo_clients = {} - log = logging.getLogger("OpenPypeMongoConnection") - - @staticmethod - def get_default_mongo_url(): - return os.environ["OPENPYPE_MONGO"] - - @classmethod - def get_mongo_client(cls, mongo_url=None): - if mongo_url is None: - mongo_url = cls.get_default_mongo_url() - - connection = cls.mongo_clients.get(mongo_url) - if connection: - # Naive validation of existing connection - try: - connection.server_info() - with connection.start_session(): - pass - except Exception: - connection = None - - if not connection: - cls.log.debug("Creating mongo connection to {}".format(mongo_url)) - connection = cls.create_connection(mongo_url) - cls.mongo_clients[mongo_url] = connection - - return connection - - @classmethod - def create_connection(cls, mongo_url, timeout=None, retry_attempts=None): - parsed = urlparse(mongo_url) - # Force validation of scheme - if parsed.scheme not in ["mongodb", "mongodb+srv"]: - raise pymongo.errors.InvalidURI(( - "Invalid URI scheme:" - " URI must begin with 'mongodb://' or 'mongodb+srv://'" - )) - - if timeout is None: - timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000) - - kwargs = { - "serverSelectionTimeoutMS": timeout - } - if should_add_certificate_path_to_mongo_url(mongo_url): - kwargs["ssl_ca_certs"] = certifi.where() - - mongo_client = pymongo.MongoClient(mongo_url, **kwargs) - - if retry_attempts is None: - retry_attempts = 3 - - elif not retry_attempts: - retry_attempts = 1 - - last_exc = None - valid = False - t1 = time.time() - for attempt in range(1, retry_attempts + 1): - try: - mongo_client.server_info() - with mongo_client.start_session(): - pass - valid = True - break - - except Exception as exc: - last_exc = exc - if attempt < retry_attempts: - cls.log.warning( - "Attempt {} failed. Retrying... ".format(attempt) - ) - time.sleep(1) - - if not valid: - raise last_exc - - cls.log.info("Connected to {}, delay {:.3f}s".format( - mongo_url, time.time() - t1 - )) - return mongo_client +__all__ = ( + "MongoEnvNotSet", + "OpenPypeMongoConnection", + "get_default_components", + "should_add_certificate_path_to_mongo_url", + "validate_mongo_connection", +) From 69ad12d61dc93b66ad9620496a4a1776f9f92306 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:21:30 +0200 Subject: [PATCH 398/785] made '_get_project_connection' function temporarily public for create, update and remove --- openpype/client/entities.py | 60 +++++++++++++++++++++++-------------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 1c32632915..aacc3a2304 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -20,7 +20,21 @@ def _get_project_database(): return OpenPypeMongoConnection.get_mongo_client()[db_name] -def _get_project_connection(project_name): +def get_project_connection(project_name): + """Direct access to mongo collection. + + We're trying to avoid using direct access to mongo. This should be used + only for Create, Update and Remove operations until there are implemented + api calls for that. + + Args: + project_name(str): Project name for which collection should be + returned. + + Returns: + pymongo.Collection: Collection realated to passed project. 
+ """ + if not project_name: raise ValueError("Invalid project name {}".format(str(project_name))) return _get_project_database()[project_name] @@ -93,7 +107,7 @@ def get_project(project_name, active=True, inactive=False, fields=None): {"data.active": False}, ] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -108,7 +122,7 @@ def get_whole_project(project_name): project collection. """ - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find({}) @@ -131,7 +145,7 @@ def get_asset_by_id(project_name, asset_id, fields=None): return None query_filter = {"type": "asset", "_id": asset_id} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -153,7 +167,7 @@ def get_asset_by_name(project_name, asset_name, fields=None): return None query_filter = {"type": "asset", "name": asset_name} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -222,7 +236,7 @@ def _get_assets( return [] query_filter["data.visualParent"] = {"$in": parent_ids} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -319,7 +333,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): return [] subset_query["parent"] = {"$in": asset_ids} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) result = conn.aggregate([ { "$match": subset_query @@ -359,7 +373,7 @@ def get_subset_by_id(project_name, subset_id, fields=None): return None query_filters = {"type": "subset", "_id": subset_id} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filters, _prepare_fields(fields)) @@ -390,7 +404,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): "name": subset_name, "parent": asset_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filters, _prepare_fields(fields)) @@ -463,7 +477,7 @@ def get_subsets( return [] query_filter["$or"] = or_query - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -487,7 +501,7 @@ def get_subset_families(project_name, subset_ids=None): return set() subset_filter["_id"] = {"$in": list(subset_ids)} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) result = list(conn.aggregate([ {"$match": subset_filter}, {"$project": { @@ -525,7 +539,7 @@ def get_version_by_id(project_name, version_id, fields=None): "type": {"$in": ["version", "hero_version"]}, "_id": version_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -548,7 +562,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None): if not subset_id: return None - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) query_filter = { "type": "version", "parent": subset_id, @@ -602,7 +616,7 @@ def _get_versions( else: query_filter["name"] = {"$in": versions} - conn = 
_get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -760,7 +774,7 @@ def get_output_link_versions(project_name, version_id, fields=None): if not version_id: return [] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) # Does make sense to look for hero versions? query_filter = { "type": "version", @@ -825,7 +839,7 @@ def get_last_versions(project_name, subset_ids, fields=None): {"$group": group_item} ] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) aggregate_result = conn.aggregate(aggregation_pipeline) if limit_query: output = {} @@ -943,7 +957,7 @@ def get_representation_by_id(project_name, representation_id, fields=None): if representation_id is not None: query_filter["_id"] = _convert_id(representation_id) - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -976,7 +990,7 @@ def get_representation_by_name( "parent": version_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -1039,7 +1053,7 @@ def _get_representations( return [] query_filter["$or"] = or_query - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -1250,7 +1264,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id): query_filter = {"_id": _convert_id(src_id)} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) if src_doc: return src_doc.get("data", {}).get("thumbnail_id") @@ -1282,7 +1296,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None): "type": "thumbnail", "_id": {"$in": thumbnail_ids} } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -1303,7 +1317,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None): if not thumbnail_id: return None query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -1334,7 +1348,7 @@ def get_workfile_info( "task_name": task_name, "filename": filename } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) From b23d89a149efb105db1f7ff8aa1c71726b7782b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:21:59 +0200 Subject: [PATCH 399/785] use client function in settings handlers instead of AvalonMongoDB --- openpype/settings/handlers.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 2bcc2e06dd..15ae2351fd 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -8,6 +8,7 @@ import six import openpype.version from openpype.client.mongo import OpenPypeMongoConnection +from openpype.client.entities import get_project_connection, get_project from .constants import ( GLOBAL_SETTINGS_KEY, @@ -338,8 +339,6 @@ class MongoSettingsHandler(SettingsHandler): def __init__(self): # 
Get mongo connection - from openpype.pipeline import AvalonMongoDB - settings_collection = OpenPypeMongoConnection.get_mongo_client() self._anatomy_keys = None @@ -362,7 +361,6 @@ class MongoSettingsHandler(SettingsHandler): self.collection_name = collection_name self.collection = settings_collection[database_name][collection_name] - self.avalon_db = AvalonMongoDB() self.system_settings_cache = CacheValues() self.project_settings_cache = collections.defaultdict(CacheValues) @@ -607,16 +605,14 @@ class MongoSettingsHandler(SettingsHandler): new_data = data_cache.data_copy() # Prepare avalon project document - collection = self.avalon_db.database[project_name] - project_doc = collection.find_one({ - "type": "project" - }) + project_doc = get_project(project_name) if not project_doc: raise ValueError(( "Project document of project \"{}\" does not exists." " Create project first." ).format(project_name)) + collection = get_project_connection(project_name) # Project's data update_dict_data = {} project_doc_data = project_doc.get("data") or {} @@ -1145,8 +1141,7 @@ class MongoSettingsHandler(SettingsHandler): document, version ) else: - collection = self.avalon_db.database[project_name] - project_doc = collection.find_one({"type": "project"}) + project_doc = get_project(project_name) self.project_anatomy_cache[project_name].update_data( self.project_doc_to_anatomy_data(project_doc), self._current_version From 9ef9c79e8fb7cf6e2a783abe3eba1ba13f1eaa6d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 18 Jul 2022 11:34:30 +0200 Subject: [PATCH 400/785] move all hosts and families to the new integrator --- .../defaults/project_settings/global.json | 83 ++++--------------- .../schemas/schema_global_publish.json | 4 +- 2 files changed, 18 insertions(+), 69 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 545c792d47..d923fc65c9 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -172,74 +172,8 @@ }, "IntegrateAssetNew": { "hosts": [ - "aftereffects", - "blender", - "celaction", - "flame", - "fusion", - "harmony", - "hiero", - "houdini", - "nuke", - "photoshop", - "resolve", - "tvpaint", - "unreal", - "standalonepublisher", - "webpublisher" ], "families": [ - "workfile", - "pointcache", - "camera", - "animation", - "model", - "mayaAscii", - "mayaScene", - "setdress", - "layout", - "ass", - "vdbcache", - "scene", - "vrayproxy", - "vrayscene_layer", - "render", - "prerender", - "imagesequence", - "review", - "rendersetup", - "rig", - "plate", - "look", - "audio", - "yetiRig", - "yeticache", - "nukenodes", - "gizmo", - "source", - "matchmove", - "image", - "assembly", - "fbx", - "textures", - "action", - "harmony.template", - "harmony.palette", - "editorial", - "background", - "camerarig", - "redshiftproxy", - "effect", - "xgen", - "hda", - "usd", - "staticMesh", - "skeletalMesh", - "mvLook", - "mvUsd", - "mvUsdComposition", - "mvUsdOverride", - "simpleUnrealTexture" ], "template_name_profiles": [ { @@ -287,7 +221,22 @@ }, "IntegrateAsset": { "hosts": [ - "maya" + "maya", + "aftereffects", + "blender", + "celaction", + "flame", + "fusion", + "harmony", + "hiero", + "houdini", + "nuke", + "photoshop", + "resolve", + "tvpaint", + "unreal", + "standalonepublisher", + "webpublisher" ], "families": [ "workfile", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 71eed2e2de..5e3978a2df 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -584,7 +584,7 @@ "type": "dict", "collapsible": true, "key": "IntegrateAssetNew", - "label": "IntegrateAssetNew", + "label": "IntegrateAsset (Legacy)", "is_group": true, "children": [ { @@ -651,7 +651,7 @@ "type": "dict", "collapsible": true, "key": "IntegrateAsset", - "label": "IntegrateAsset", + "label": "Integrate Asset", "is_group": true, "children": [ { From 7646c54da87fe04570ba67027bbf5af308cc7b83 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 18 Jul 2022 11:36:09 +0200 Subject: [PATCH 401/785] move subset group collecting to early integrator --- ...{collect_subset_group.py => integrate_subset_group.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename openpype/plugins/publish/{collect_subset_group.py => integrate_subset_group.py} (94%) diff --git a/openpype/plugins/publish/collect_subset_group.py b/openpype/plugins/publish/integrate_subset_group.py similarity index 94% rename from openpype/plugins/publish/collect_subset_group.py rename to openpype/plugins/publish/integrate_subset_group.py index 56cd7de94e..4b566e8908 100644 --- a/openpype/plugins/publish/collect_subset_group.py +++ b/openpype/plugins/publish/integrate_subset_group.py @@ -17,12 +17,12 @@ from openpype.lib import ( ) -class CollectSubsetGroup(pyblish.api.InstancePlugin): - """Collect Subset Group for publish.""" +class IntegrateSubsetGroup(pyblish.api.InstancePlugin): + """Integrate Subset Group for publish.""" # Run after CollectAnatomyInstanceData - order = pyblish.api.CollectorOrder + 0.495 - label = "Collect Subset Group" + order = pyblish.api.IntegratorOrder - 0.1 + label = "Subset Group" # Attributes set by settings subset_grouping_profiles = None From 0c59f1539872981ff896119ef7bb729c4c08064d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 18 Jul 2022 11:38:28 +0200 Subject: [PATCH 402/785] rename old integrator to integrate legacy --- .../plugins/publish/{integrate_new.py => integrate_legacy.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/plugins/publish/{integrate_new.py => integrate_legacy.py} (100%) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_legacy.py similarity index 100% rename from openpype/plugins/publish/integrate_new.py rename to openpype/plugins/publish/integrate_legacy.py From d4a29c39aaae2c383ea9b887b294eab91352901d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 11:40:48 +0200 Subject: [PATCH 403/785] added deprecation warning to functions --- openpype/lib/mongo.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/openpype/lib/mongo.py b/openpype/lib/mongo.py index 80487f317d..bb2ee6016a 100644 --- a/openpype/lib/mongo.py +++ b/openpype/lib/mongo.py @@ -1,21 +1,51 @@ +import warnings +import functools from openpype.client.mongo import ( MongoEnvNotSet, OpenPypeMongoConnection, ) +class MongoDeprecatedWarning(DeprecationWarning): + pass + + +def mongo_deprecated(func): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. 
+ """ + + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", MongoDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'." + " Function was moved to 'openpype.client.mongo'." + ).format(func.__name__), + category=MongoDeprecatedWarning, + stacklevel=2 + ) + return func(*args, **kwargs) + return new_func + + +@mongo_deprecated def get_default_components(): from openpype.client.mongo import get_default_components return get_default_components() +@mongo_deprecated def should_add_certificate_path_to_mongo_url(mongo_url): from openpype.client.mongo import should_add_certificate_path_to_mongo_url return should_add_certificate_path_to_mongo_url(mongo_url) +@mongo_deprecated def validate_mongo_connection(mongo_uri): from openpype.client.mongo import validate_mongo_connection From ca2f554a1c4b167d444bbdde1c962f4f9bd7198d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 17:27:49 +0200 Subject: [PATCH 404/785] escape html chars from label in widgets --- openpype/tools/publisher/publish_report_viewer/model.py | 4 +++- openpype/tools/publisher/widgets/card_view_widgets.py | 3 ++- openpype/tools/publisher/widgets/list_view_widgets.py | 7 +++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/model.py b/openpype/tools/publisher/publish_report_viewer/model.py index a88129a358..bd03376c55 100644 --- a/openpype/tools/publisher/publish_report_viewer/model.py +++ b/openpype/tools/publisher/publish_report_viewer/model.py @@ -1,4 +1,5 @@ import uuid +import html from Qt import QtCore, QtGui import pyblish.api @@ -45,7 +46,8 @@ class InstancesModel(QtGui.QStandardItemModel): all_removed = True for instance_item in instance_items: item = QtGui.QStandardItem(instance_item.label) - item.setData(instance_item.label, ITEM_LABEL_ROLE) + instance_label = html.escape(instance_item.label) + item.setData(instance_label, ITEM_LABEL_ROLE) item.setData(instance_item.errored, ITEM_ERRORED_ROLE) item.setData(instance_item.id, ITEM_ID_ROLE) item.setData(instance_item.removed, INSTANCE_REMOVED_ROLE) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index b6fcee7edb..5a6878ddca 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -22,6 +22,7 @@ Only one item can be selected at a time. 
import re import collections +import html from Qt import QtWidgets, QtCore @@ -303,7 +304,7 @@ class InstanceCardWidget(CardWidget): self._last_variant = variant self._last_subset_name = subset_name # Make `variant` bold - label = self.instance.label + label = html.escape(self.instance.label) found_parts = set(re.findall(variant, label, re.IGNORECASE)) if found_parts: for part in found_parts: diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 3476ee487e..3e4fd5b72d 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -23,6 +23,7 @@ selection can be enabled disabled using checkbox or keyboard key presses: ``` """ import collections +import html from Qt import QtWidgets, QtCore, QtGui @@ -113,7 +114,9 @@ class InstanceListItemWidget(QtWidgets.QWidget): self.instance = instance - subset_name_label = QtWidgets.QLabel(instance.label, self) + instance_label = html.escape(instance.label) + + subset_name_label = QtWidgets.QLabel(instance_label, self) subset_name_label.setObjectName("ListViewSubsetName") active_checkbox = NiceCheckbox(parent=self) @@ -178,7 +181,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): # Check subset name label = self.instance.label if label != self._instance_label_widget.text(): - self._instance_label_widget.setText(label) + self._instance_label_widget.setText(html.escape(label)) # Check active state self.set_active(self.instance["active"]) # Check valid states From ddf07d790886a54e21eb3af0358102bbfc693643 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 17:30:02 +0200 Subject: [PATCH 405/785] handle cases when task is not set and subset name requires it --- .../plugins/create/create_mov_batch.py | 37 ++++++++++++++----- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index 67f8848e05..840b0647f9 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -4,7 +4,12 @@ import re from openpype.client import get_assets, get_asset_by_name from openpype.hosts.traypublisher.api import pipeline -from openpype.lib import FileDef, BoolDef, get_subset_name_with_asset_doc +from openpype.lib import ( + FileDef, + BoolDef, + get_subset_name_with_asset_doc, + TaskNotSetError, +) from openpype.pipeline import ( CreatedInstance, CreatorError @@ -124,13 +129,27 @@ class BatchMovCreator(TrayPublishCreator): """Create subset name according to standard template process""" task_name = self._get_task_name(asset_doc) - subset_name = get_subset_name_with_asset_doc( - self.family, - variant, - task_name, - asset_doc, - project_name - ) + try: + subset_name = get_subset_name_with_asset_doc( + self.family, + variant, + task_name, + asset_doc, + project_name + ) + except TaskNotSetError: + # Create instance with fake task + # - instance will be marked as invalid so it can't be published + # but user have ability to change it + # NOTE: This expect that there is not task 'Undefined' on asset + task_name = "Undefined" + subset_name = get_subset_name_with_asset_doc( + self.family, + variant, + task_name, + asset_doc, + project_name + ) return subset_name, task_name @@ -178,7 +197,7 @@ class BatchMovCreator(TrayPublishCreator): def get_detail_description(self): return """# Publish batch of .mov to 
multiple assets. - + File names must then contain only asset name, or asset name + version. (eg. 'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` """ From 84781c12e570b085f533421c5c8ef0712f611504 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 18:12:04 +0200 Subject: [PATCH 406/785] call 'bulk_write' directly on legacy_io --- openpype/plugins/publish/integrate.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 5e86eb014a..790f96d419 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -79,12 +79,6 @@ def get_first_frame_padded(collection): return get_frame_padded(start_frame, padding=collection.padding) -def bulk_write(writes): - """Convenience function to bulk write into active project database""" - project = legacy_io.Session["AVALON_PROJECT"] - return legacy_io._database[project].bulk_write(writes) - - class IntegrateAsset(pyblish.api.InstancePlugin): """Register publish in the database and transfer files to destinations. @@ -288,7 +282,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Transaction to reduce the chances of another publish trying to # publish to the same version number since that chance can greatly # increase if the file transaction takes a long time. - bulk_write(subset_writes + version_writes) + legacy_io.bulk_write(subset_writes + version_writes) self.log.info("Subset {subset[name]} and Version {version[name]} " "written to database..".format(subset=subset, version=version)) @@ -362,7 +356,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): )) # Write representations to the database - bulk_write(representation_writes) + legacy_io.bulk_write(representation_writes) # Backwards compatibility # todo: can we avoid the need to store this? 

 
From 842cf06bf95458537d22eae3031bb7c64586e308 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 18 Jul 2022 18:13:33 +0200
Subject: [PATCH 407/785] new integrator can tell legacy one that should not
 process the instance

---
 openpype/plugins/publish/integrate.py        | 2 +-
 openpype/plugins/publish/integrate_legacy.py | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
index 790f96d419..71032a1d96 100644
--- a/openpype/plugins/publish/integrate.py
+++ b/openpype/plugins/publish/integrate.py
@@ -171,7 +171,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
     template_name_profiles = None
 
     def process(self, instance):
-
+        instance.data["processedWithNewIntegrator"] = True
         # Exclude instances that also contain families from exclude families
         families = set(get_instance_families(instance))
         exclude = families & set(self.exclude_families)
diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py
index 797479af45..18e4035602 100644
--- a/openpype/plugins/publish/integrate_legacy.py
+++ b/openpype/plugins/publish/integrate_legacy.py
@@ -145,6 +145,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     subset_grouping_profiles = None
 
     def process(self, instance):
+        if instance.data.get("processedWithNewIntegrator"):
+            self.log.info("Instance was already processed with new integrator")
+            return
+
         for ef in self.exclude_families:
             if (
                 instance.data["family"] == ef or
From a1784fc25e2b433e1f32e35a36588205b9904e98 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 18 Jul 2022 18:26:27 +0200
Subject: [PATCH 408/785] representations are checked before instance
 registration begins

---
 openpype/plugins/publish/integrate.py | 80 +++++++++++++++++----------
 1 file changed, 50 insertions(+), 30 deletions(-)

diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
index 71032a1d96..97f99bdba7 100644
--- a/openpype/plugins/publish/integrate.py
+++ b/openpype/plugins/publish/integrate.py
@@ -172,6 +173,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
 
     def process(self, instance):
         instance.data["processedWithNewIntegrator"] = True
+
+        filtered_repres = self.filter_representations(instance)
+        # Skip instance if there are no representations to integrate
+        #   or all representations should not be integrated
+        if not filtered_repres:
+            self.log.warning((
+                "Skipping, there are no representations"
+                " to integrate for instance {}"
+            ).format(instance.data["family"]))
+            return
+
         # Exclude instances that also contain families from exclude families
         families = set(get_instance_families(instance))
         exclude = families & set(self.exclude_families)
@@ -182,7 +194,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
 
         file_transactions = FileTransaction(log=self.log)
         try:
-            self.register(instance, file_transactions)
+            self.register(instance, file_transactions, filtered_repres)
         except Exception:
            # clean destination
             # todo: preferably we'd also rollback *any* changes to the database
@@ -194,8 +206,35 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             # the try, except. 
            file_transactions.finalize()
 
-    def register(self, instance, file_transactions):
+    def filter_representations(self, instance):
+        # Prepare representations that should be integrated
+        repres = instance.data.get("representations")
+        # Raise error if instance doesn't have any representations
+        if not repres:
+            raise KnownPublishError(
+                "Instance {} has no representations to integrate".format(
+                    instance.data["family"]
+                )
+            )
+
+        # Validate type of stored representations
+        if not isinstance(repres, (list, tuple)):
+            raise TypeError(
+                "Instance 'representations' must be a list, got: {0} {1}".format(
+                    str(type(repres)), str(repres)
+                )
+            )
+
+        # Filter representations
+        filtered_repres = []
+        for repre in repres:
+            if "delete" in repre.get("tags", []):
+                continue
+            filtered_repres.append(repre)
+
+        return filtered_repres
+
+    def register(self, instance, file_transactions, filtered_repres):
         instance_stagingdir = instance.data.get("stagingDir")
         if not instance_stagingdir:
             self.log.info((
@@ -209,15 +248,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             "@ {0}".format(instance_stagingdir)
         )
 
-        # Ensure at least one representation is set up for registering.
-        repres = instance.data.get("representations")
-        assert repres, "Instance has no representations data"
-        assert isinstance(repres, (list, tuple)), (
-            "Instance 'representations' must be a list, got: {0} {1}".format(
-                str(type(repres)), str(repres)
-            )
-        )
-
         template_name = self.get_template_name(instance)
 
         subset, subset_writes = self.prepare_subset(instance)
@@ -238,20 +268,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
 
         # Prepare all representations
         prepared_representations = []
-        for repre in instance.data["representations"]:
-
-            if "delete" in repre.get("tags", []):
-                self.log.debug("Skipping representation marked for deletion: "
-                               "{}".format(repre))
-                continue
-
+        for repre in filtered_repres:
             # todo: reduce/simplify what is returned from this function
-            prepared = self.prepare_representation(repre,
-                                                   template_name,
-                                                   existing_repres_by_name,
-                                                   version,
-                                                   instance_stagingdir,
-                                                   instance)
+            prepared = self.prepare_representation(
+                repre,
+                template_name,
+                existing_repres_by_name,
+                version,
+                instance_stagingdir,
+                instance)
 
             for src, dst in prepared["transfers"]:
                 # todo: add support for hardlink transfers
@@ -259,12 +284,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
 
             prepared_representations.append(prepared)
 
-        if not prepared_representations:
-            # Even though we check `instance.data["representations"]` earlier
-            # this could still happen if all representations were tagged with
-            # "delete" and thus are skipped for integration
-            raise RuntimeError("No representations prepared to publish.")
-
         # Each instance can also have pre-defined transfers not explicitly
         # part of a representation - like texture resources used by a
         # .ma representation. Those destination paths are pre-defined, etc. 
@@ -273,6 +292,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): for src, dst in instance.data.get("transfers", []): file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) resource_destinations.add(os.path.abspath(dst)) + for src, dst in instance.data.get("hardlinks", []): file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) resource_destinations.add(os.path.abspath(dst)) From 7016ca41f7809a1d3729e4999ddf4c1c0dfe1299 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 18:27:37 +0200 Subject: [PATCH 409/785] use already prepared modules from context --- openpype/plugins/publish/integrate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 97f99bdba7..8fe5138963 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -317,8 +317,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Retrieving Representation Site Sync information ...") # Get the accessible sites for Site Sync - manager = ModulesManager() - sync_server_module = manager.modules_by_name["sync_server"] + modules_by_name = instance.context.data["openPypeModules"] + sync_server_module = modules_by_name["sync_server"] sites = sync_server_module.compute_resource_sync_sites( project_name=instance.data["projectEntity"]["name"] ) From d04abc3767f7c5990c6e0a8a420229bc034075f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 18:27:54 +0200 Subject: [PATCH 410/785] use host name from context data --- openpype/plugins/publish/integrate.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8fe5138963..e76adb55b8 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -310,10 +310,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Process all file transfers of all integrations now self.log.debug("Integrating source files to destination ...") file_transactions.process() - self.log.debug("Backed up existing files: " - "{}".format(file_transactions.backups)) - self.log.debug("Transferred files: " - "{}".format(file_transactions.transferred)) + self.log.debug( + "Backed up existing files: {}".format(file_transactions.backups)) + self.log.debug( + "Transferred files: {}".format(file_transactions.transferred)) self.log.debug("Retrieving Representation Site Sync information ...") # Get the accessible sites for Site Sync @@ -780,8 +780,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return { "families": anatomy_data["family"], "tasks": task.get("name"), - "hosts": anatomy_data["app"], - "task_types": task.get("type") + "task_types": task.get("type"), + "hosts": instance.context["hostName"], } def get_rootless_path(self, anatomy, path): From 79c01bdf1a83f4b5d4b57ce0afcc237f696a6387 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 18 Jul 2022 18:28:29 +0200 Subject: [PATCH 411/785] skip instances marked to be integrated on farm --- openpype/plugins/publish/integrate.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index e76adb55b8..e3a81091ba 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -172,8 +172,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name_profiles = None def process(self, instance): + # Mark 
instance as processed for legacy integrator instance.data["processedWithNewIntegrator"] = True + # Instance should be integrated on a farm + if instance.data.get("farm"): + self.log.info( + "Instance is marked to be processed on farm. Skipping") + return + filtered_repres = self.filter_representations(instance) # Skip instance if there are not representations to integrate # all representations should not be integrated From f398fae425d66c1b9934e9ec016fa1634761f633 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 09:48:22 +0200 Subject: [PATCH 412/785] simplified settings for skipping of families --- .../defaults/project_settings/global.json | 119 +----------------- .../schemas/schema_global_publish.json | 60 +-------- 2 files changed, 7 insertions(+), 172 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index d923fc65c9..bdcf85d1b2 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -171,10 +171,6 @@ ] }, "IntegrateAssetNew": { - "hosts": [ - ], - "families": [ - ], "template_name_profiles": [ { "families": [], @@ -220,120 +216,7 @@ ] }, "IntegrateAsset": { - "hosts": [ - "maya", - "aftereffects", - "blender", - "celaction", - "flame", - "fusion", - "harmony", - "hiero", - "houdini", - "nuke", - "photoshop", - "resolve", - "tvpaint", - "unreal", - "standalonepublisher", - "webpublisher" - ], - "families": [ - "workfile", - "pointcache", - "camera", - "animation", - "model", - "mayaAscii", - "mayaScene", - "setdress", - "layout", - "ass", - "vdbcache", - "scene", - "vrayproxy", - "vrayscene_layer", - "render", - "prerender", - "imagesequence", - "review", - "rendersetup", - "rig", - "plate", - "look", - "audio", - "yetiRig", - "yeticache", - "nukenodes", - "gizmo", - "source", - "matchmove", - "image", - "assembly", - "fbx", - "textures", - "action", - "harmony.template", - "harmony.palette", - "editorial", - "background", - "camerarig", - "redshiftproxy", - "effect", - "xgen", - "hda", - "usd", - "staticMesh", - "skeletalMesh", - "mvLook", - "mvUsd", - "mvUsdComposition", - "mvUsdOverride", - "simpleUnrealTexture" - ], - "template_name_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template_name": "publish" - }, - { - "families": [ - "review", - "render", - "prerender" - ], - "hosts": [], - "task_types": [], - "tasks": [], - "template_name": "render" - }, - { - "families": [ - "simpleUnrealTexture" - ], - "hosts": [ - "standalonepublisher" - ], - "task_types": [], - "tasks": [], - "template_name": "simpleUnrealTexture" - }, - { - "families": [ - "staticMesh", - "skeletalMesh" - ], - "hosts": [ - "maya" - ], - "task_types": [], - "tasks": [], - "template_name": "maya2unreal" - } - ] + "skip_host_families": [] }, "IntegrateHeroVersion": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 5e3978a2df..41eb04be4e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -587,18 +587,6 @@ "label": "IntegrateAsset (Legacy)", "is_group": true, "children": [ - { - "type": "list", - "key": "hosts", - "label": "Hosts", - "object_type": "text" - }, - { - "key": "families", - "label": 
"Families", - "type": "list", - "object_type": "text" - }, { "type": "list", "key": "template_name_profiles", @@ -656,58 +644,22 @@ "children": [ { "type": "list", - "key": "hosts", - "label": "Hosts", - "object_type": "text" - }, - { - "key": "families", - "label": "Families", - "type": "list", - "object_type": "text" - }, - { - "type": "list", - "key": "template_name_profiles", - "label": "Template name profiles", + "key": "skip_host_families", + "label": "Skip hosts and families", "use_label_wrap": true, "object_type": { "type": "dict", "children": [ { - "type": "label", - "label": "" + "type": "hosts-enum", + "key": "host", + "label": "Host" }, { + "type": "list", "key": "families", "label": "Families", - "type": "list", "object_type": "text" - }, - { - "type": "hosts-enum", - "key": "hosts", - "label": "Hosts", - "multiselection": true - }, - { - "key": "task_types", - "label": "Task types", - "type": "task-types-enum" - }, - { - "key": "tasks", - "label": "Task names", - "type": "list", - "object_type": "text" - }, - { - "type": "separator" - }, - { - "type": "text", - "key": "template_name", - "label": "Template name" } ] } From 1a024d3552723245c273362793ecee6b99f29823 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 09:56:52 +0200 Subject: [PATCH 413/785] use settings to decide if new integrator should skip instances --- openpype/plugins/publish/integrate.py | 37 +++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index e3a81091ba..0b725750aa 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -167,11 +167,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "project", "asset", "task", "subset", "version", "representation", "family", "hierarchy", "username" ] + skip_host_families = [] # Attributes set by settings template_name_profiles = None def process(self, instance): + if self._temp_skip_instance_by_settings(instance): + return + # Mark instance as processed for legacy integrator instance.data["processedWithNewIntegrator"] = True @@ -213,6 +217,39 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the try, except. file_transactions.finalize() + def _temp_skip_instance_by_settings(self, instance): + """Decide if instance will be processed with new or legacy integrator. + + This is temporary solution until we test all usecases with new (this) + integrator plugin. 
+ """ + + host_name = instance.context.data["hostName"] + instance_family = instance.data["family"] + instance_families = set(instance.data.get("families") or []) + + skip = False + for item in self.skip_host_families: + if item["host"] != host_name: + continue + + families = set(item["families"]) + if instance_family in families: + skip = True + break + + for family in instance_families: + if family in families: + skip = True + break + + if skip: + break + + if skip: + self.log.debug("Instance is marked to be skipped by settings.") + return skip + def filter_representations(self, instance): # Prepare repsentations that should be integrated repres = instance.data.get("representations") From 0ebd4b7c9afffa3174ae876f94ca22fa52f83627 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 09:57:00 +0200 Subject: [PATCH 414/785] added remaining hosts to integrator hosts --- openpype/plugins/publish/integrate.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0b725750aa..3c61d01858 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -105,7 +105,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): label = "Integrate Asset" order = pyblish.api.IntegratorOrder - hosts = ["maya"] + hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", + "hiero", "houdini", "nuke", "photoshop", "resolve", + "standalonepublisher", "traypublisher", "tvpaint", "unreal", + "webpublisher"] families = ["workfile", "pointcache", "camera", From 6c457b2ed1331bb193a572803670836b40f16667 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 10:10:22 +0200 Subject: [PATCH 415/785] use settings for publish templates from legacy integrator --- openpype/plugins/publish/integrate.py | 33 ++++++++++++++++++++------- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 3c61d01858..1b8015c946 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -172,9 +172,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ] skip_host_families = [] - # Attributes set by settings - template_name_profiles = None - def process(self, instance): if self._temp_skip_instance_by_settings(instance): return @@ -807,13 +804,33 @@ class IntegrateAsset(pyblish.api.InstancePlugin): """Return anatomy template name to use for integration""" # Define publish template name from profiles filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.template_name_profiles, - filter_criteria, - logger=self.log) + template_name_profiles = self._get_template_name_profiles(instance) + profile = filter_profiles( + template_name_profiles, + filter_criteria, + logger=self.log + ) + if profile: return profile["template_name"] - else: - return self.default_template_name + return self.default_template_name + + def _get_template_name_profiles(self, instance): + """Receive profiles for publish template keys. + + Reuse template name profiles from legacy integrator. Goal is to move + the profile settings out of plugin settings but until that happens we + want to be able set it at one place and don't break backwards + compatibility (more then once). 
+ """ + + return ( + instance.context["project_settings"] + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) def get_profile_filter_criteria(self, instance): """Return filter criteria for `filter_profiles`""" From 30db574170a526759ac6c40c574aac0ed41bcdea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 10:18:41 +0200 Subject: [PATCH 416/785] fixed data access --- openpype/plugins/publish/integrate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 1b8015c946..c9eb26d0b7 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -825,7 +825,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): """ return ( - instance.context["project_settings"] + instance.context.data["project_settings"] ["global"] ["publish"] ["IntegrateAssetNew"] @@ -845,7 +845,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "families": anatomy_data["family"], "tasks": task.get("name"), "task_types": task.get("type"), - "hosts": instance.context["hostName"], + "hosts": instance.context.data["hostName"], } def get_rootless_path(self, anatomy, path): From 3c35cbc700102270c55adfa8b249ad9f75ebd226 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 10:23:13 +0200 Subject: [PATCH 417/785] make sure legacy integrator happens after new integrator --- openpype/plugins/publish/integrate.py | 1 - openpype/plugins/publish/integrate_legacy.py | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index c9eb26d0b7..cfaff4067b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -10,7 +10,6 @@ from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api import openpype.api -from openpype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 18e4035602..34e81a3839 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -70,7 +70,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): """ label = "Integrate Asset (legacy)" - order = pyblish.api.IntegratorOrder + # Make sure it happens after new integrator + order = pyblish.api.IntegratorOrder + 0.00001 hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", "hiero", "houdini", "nuke", "photoshop", "resolve", "standalonepublisher", "traypublisher", "tvpaint", "unreal", From dc569c6d65e2a211681a4f0d17aea1874cef9268 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 10:30:31 +0200 Subject: [PATCH 418/785] rename and move CollectSubsetGroup to IntegrateSubsetGroup in settings --- .../defaults/project_settings/global.json | 22 ++-- .../schemas/schema_global_publish.json | 110 +++++++++--------- 2 files changed, 66 insertions(+), 66 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index bdcf85d1b2..9247c6ceb6 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -20,17 +20,6 @@ ], "skip_hosts_headless_publish": [] }, - 
"CollectSubsetGroup": { - "subset_grouping_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "" - } - ] - }, "ValidateEditorialAssetName": { "enabled": true, "optional": false @@ -170,6 +159,17 @@ } ] }, + "IntegrateSubsetGroup": { + "subset_grouping_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template": "" + } + ] + }, "IntegrateAssetNew": { "template_name_profiles": [ { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 41eb04be4e..af08bbec3c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -39,61 +39,6 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CollectSubsetGroup", - "label": "Collect Subset Group", - "is_group": true, - "children": [ - { - "type": "list", - "key": "subset_grouping_profiles", - "label": "Subset grouping profiles", - "use_label_wrap": true, - "object_type": { - "type": "dict", - "children": [ - { - "type": "label", - "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" - }, - { - "key": "families", - "label": "Families", - "type": "list", - "object_type": "text" - }, - { - "type": "hosts-enum", - "key": "hosts", - "label": "Hosts", - "multiselection": true - }, - { - "key": "task_types", - "label": "Task types", - "type": "task-types-enum" - }, - { - "key": "tasks", - "label": "Task names", - "type": "list", - "object_type": "text" - }, - { - "type": "separator" - }, - { - "type": "text", - "key": "template", - "label": "Template" - } - ] - } - } - ] - }, { "type": "dict", "collapsible": true, @@ -580,6 +525,61 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "IntegrateSubsetGroup", + "label": "Integrate Subset Group", + "is_group": true, + "children": [ + { + "type": "list", + "key": "subset_grouping_profiles", + "label": "Subset grouping profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template", + "label": "Template" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, From 76c594af9a39f22bbbb00156c09d67bd9d0d2d0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Jul 2022 10:30:48 +0200 Subject: [PATCH 419/785] check for existence of subset group on instance before profiling --- .../plugins/publish/integrate_subset_group.py | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/openpype/plugins/publish/integrate_subset_group.py b/openpype/plugins/publish/integrate_subset_group.py index 4b566e8908..910cb060a6 100644 --- a/openpype/plugins/publish/integrate_subset_group.py +++ b/openpype/plugins/publish/integrate_subset_group.py @@ -37,18 +37,22 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): if not self.subset_grouping_profiles: return - # Skip if there is no matching profile - filter_criteria = self.get_profile_filter_criteria(instance) - profile = filter_profiles(self.subset_grouping_profiles, - filter_criteria, - logger=self.log) - if not profile: - return - if instance.data.get("subsetGroup"): # If subsetGroup is already set then allow that value to remain - self.log.debug("Skipping collect subset group due to existing " - "value: {}".format(instance.data["subsetGroup"])) + self.log.debug(( + "Skipping collect subset group due to existing value: {}" + ).format(instance.data["subsetGroup"])) + return + + # Skip if there is no matching profile + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles( + self.subset_grouping_profiles, + filter_criteria, + logger=self.log + ) + + if not profile: return template = profile["template"] @@ -68,9 +72,9 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): ) except (KeyError, TemplateUnsolved): keys = fill_pairs.keys() - msg = "Subset grouping failed. " \ - "Only {} are expected in Settings".format(','.join(keys)) - self.log.warning(msg) + self.log.warning(( + "Subset grouping failed. 
Only {} are expected in Settings" + ).format(','.join(keys))) if filled_template: instance.data["subsetGroup"] = filled_template From 508b3e7d8eccb554b5a6ed9f1660d611b780633d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Jul 2022 10:47:10 +0200 Subject: [PATCH 420/785] Update openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/traypublisher/plugins/publish/collect_mov_batch.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py index d24659aa8b..e6f33bc619 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py @@ -32,12 +32,11 @@ class CollectMovBatch( "name": ext[1:], "ext": ext[1:], "files": file_name, - "stagingDir": os.path.dirname(file_url) + "stagingDir": os.path.dirname(file_url), + "tags": [] } if creator_attributes["add_review_family"]: - if not repre.get("tags"): - repre["tags"] = [] repre["tags"].append("review") instance.data["families"].append("review") From fa10d36f589d1961f9dcb425129a0a4a69399b86 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Jul 2022 10:47:43 +0200 Subject: [PATCH 421/785] Better creation of instance Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/traypublisher/plugins/create/create_mov_batch.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py index 840b0647f9..bbabd73415 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py @@ -64,10 +64,7 @@ class BatchMovCreator(TrayPublishCreator): # Create new instance new_instance = CreatedInstance(self.family, subset_name, instance_data, self) - # Host implementation of storing metadata about instance - pipeline.HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) + self._store_new_instance(new_instance) def get_asset_doc_from_file_name(self, source_filename, project_name): """Try to parse out asset name from file name provided. From b4bd2542ff86dea9ce0a480ca6d1524900d6847f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Jul 2022 10:57:30 +0200 Subject: [PATCH 422/785] OP-3446 - renamed from mov to movie Workflow should handle not only .mov, but .mp4s or any movie format. 
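
A minimal illustrative sketch (not part of the original change) of how such
file names are expected to split into asset name and version, assuming the
creator keeps the version_regex pattern visible in the diff below; the file
names used here are hypothetical examples:

    import re

    # Same pattern as BatchMovieCreator.version_regex in the diff below
    version_regex = re.compile(r"^(.+)_v([0-9]+)$")

    for file_name in ("chair.mp4", "chair_v001.mov"):
        stem = file_name.rsplit(".", 1)[0]
        match = version_regex.match(stem)
        asset_name, version = match.groups() if match else (stem, None)
        print(asset_name, version)  # -> chair None / chair 001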
--- ...eate_mov_batch.py => create_movie_batch.py} | 18 +++++++++++------- ...ect_mov_batch.py => collect_movie_batch.py} | 4 ++-- .../project_settings/traypublisher.json | 2 +- .../schema_project_traypublisher.json | 4 ++-- 4 files changed, 16 insertions(+), 12 deletions(-) rename openpype/hosts/traypublisher/plugins/create/{create_mov_batch.py => create_movie_batch.py} (94%) rename openpype/hosts/traypublisher/plugins/publish/{collect_mov_batch.py => collect_movie_batch.py} (97%) diff --git a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py similarity index 94% rename from openpype/hosts/traypublisher/plugins/create/create_mov_batch.py rename to openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index bbabd73415..63b9b2ef28 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -18,21 +18,25 @@ from openpype.pipeline import ( from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator -class BatchMovCreator(TrayPublishCreator): - """Creates instances from .mov file(s).""" - identifier = "render_mov_batch" - label = "Batch Mov" +class BatchMovieCreator(TrayPublishCreator): + """Creates instances from movie file(s). + + Intended for .mov files, but should work for any video file. + Doesn't handle image sequences though. + """ + identifier = "render_movie_batch" + label = "Batch Movies" family = "render" - description = "Publish batch of movs" + description = "Publish batch of video files" create_allow_context_change = False version_regex = re.compile(r"^(.+)_v([0-9]+)$") def __init__(self, project_settings, *args, **kwargs): - super(BatchMovCreator, self).__init__(project_settings, + super(BatchMovieCreator, self).__init__(project_settings, *args, **kwargs) creator_settings = ( - project_settings["traypublisher"]["BatchMovCreator"] + project_settings["traypublisher"]["BatchMovieCreator"] ) self.default_variants = creator_settings["default_variants"] self.default_tasks = creator_settings["default_tasks"] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py similarity index 97% rename from openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py rename to openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py index e6f33bc619..45ccbd92d4 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_mov_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py @@ -4,7 +4,7 @@ import pyblish.api from openpype.pipeline import OpenPypePyblishPluginMixin -class CollectMovBatch( +class CollectMovieBatch( pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin ): """Collect file url for batch mov and create representation. 
@@ -19,7 +19,7 @@ class CollectMovBatch( hosts = ["traypublisher"] def process(self, instance): - if instance.data.get("creator_identifier") != "render_mov_batch": + if instance.data.get("creator_identifier") != "render_movie_batch": return creator_attributes = instance.data["creator_attributes"] diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index cbe58f49d6..8bf3e3b306 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -236,7 +236,7 @@ "extensions": [] } ], - "BatchMovCreator": { + "BatchMovieCreator": { "default_variants": ["Main"], "default_tasks": ["Compositing"], "extensions": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 50ba246c97..e38aa64e04 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -87,8 +87,8 @@ { "type": "dict", "collapsible": true, - "key": "BatchMovCreator", - "label": "Batch Mov Creator", + "key": "BatchMovieCreator", + "label": "Batch Movie Creator", "use_label_wrap": true, "collapsible_key": true, "children": [ From 96be94c0c2b3aa2657bfe832b28904fbaec8ff53 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Jul 2022 11:27:16 +0200 Subject: [PATCH 423/785] OP-3446 - added label to Settings Updated label for collector --- .../traypublisher/plugins/publish/collect_movie_batch.py | 4 ++-- .../projects_schema/schema_project_traypublisher.json | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py index 45ccbd92d4..f37e04d1c9 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py @@ -7,13 +7,13 @@ from openpype.pipeline import OpenPypePyblishPluginMixin class CollectMovieBatch( pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin ): - """Collect file url for batch mov and create representation. + """Collect file url for batch movies and create representation. Adds review on instance and to repre.tags based on value of toggle button on creator. """ - label = "Collect Mov Batch Files" + label = "Collect Movie Batch Files" order = pyblish.api.CollectorOrder hosts = ["traypublisher"] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index e38aa64e04..8f0f864dc2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -89,9 +89,12 @@ "collapsible": true, "key": "BatchMovieCreator", "label": "Batch Movie Creator", - "use_label_wrap": true, "collapsible_key": true, "children": [ + { + "type": "label", + "label": "Allows to publish multiple video files in one go.
Name of matching asset is parsed from file names ('asset.mov', 'asset_v001.mov', 'my_asset_to_publish.mov')" + }, { "type": "list", "key": "default_variants", From 505b42ace2a7e07bfff9cb191e0f5f873d6af181 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Jul 2022 11:29:34 +0200 Subject: [PATCH 424/785] OP-3446 - Hound --- .../hosts/traypublisher/plugins/create/create_movie_batch.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index 63b9b2ef28..c5f0d6b75e 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -3,7 +3,6 @@ import os import re from openpype.client import get_assets, get_asset_by_name -from openpype.hosts.traypublisher.api import pipeline from openpype.lib import ( FileDef, BoolDef, @@ -34,7 +33,7 @@ class BatchMovieCreator(TrayPublishCreator): def __init__(self, project_settings, *args, **kwargs): super(BatchMovieCreator, self).__init__(project_settings, - *args, **kwargs) + *args, **kwargs) creator_settings = ( project_settings["traypublisher"]["BatchMovieCreator"] ) From c59a9cb05f641ccf8ec3638d69b1cf764cee9262 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 19 Jul 2022 14:40:01 +0200 Subject: [PATCH 425/785] Fix: shot duplicate name using shot's hierarchy (ep, seq) --- openpype/modules/kitsu/utils/update_op_with_zou.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 4695a49159..f43bf07e25 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -181,7 +181,7 @@ def update_op_assets( # Find root folder docs root_folder_docs = get_assets( project_name, - asset_name=[entity_parent_folders[-1]], + asset_names=[entity_parent_folders[-1]], fields=["_id", "data.root_of"] ) # NOTE: Not sure why it's checking for entity type? 
@@ -221,6 +221,14 @@ def update_op_assets( parent_entity = parent_doc["data"]["zou"] parent_zou_id = parent_entity["parent_id"] + # Item name + if item_type == "Asset": + item_name = item_doc["name"] + elif item_type == "Shot": + # Name with parents hierarchy "({episode}_){sequence}_{shot}" + # to avoid duplicate name issue + item_name = "_".join(item_data["parents"] + [item_doc["name"]]) + # Set root folders parents item_data["parents"] = entity_parent_folders + item_data["parents"] @@ -234,7 +242,7 @@ def update_op_assets( item_doc["_id"], { "$set": { - "name": item["name"], + "name": item_name, "data": item_data, "parent": asset_doc_ids[item["project_id"]]["_id"], } From 9f41a512cb4ab27884ee2ad0465c23837f8ccad3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 19 Jul 2022 14:44:25 +0200 Subject: [PATCH 426/785] black --- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index f43bf07e25..9a5dd7db61 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -14,7 +14,7 @@ from openpype.client import ( get_project, get_assets, get_asset_by_id, - get_asset_by_name + get_asset_by_name, ) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings @@ -182,7 +182,7 @@ def update_op_assets( root_folder_docs = get_assets( project_name, asset_names=[entity_parent_folders[-1]], - fields=["_id", "data.root_of"] + fields=["_id", "data.root_of"], ) # NOTE: Not sure why it's checking for entity type? # OP3 does not support multiple assets with same names so type @@ -224,7 +224,7 @@ def update_op_assets( # Item name if item_type == "Asset": item_name = item_doc["name"] - elif item_type == "Shot": + elif item_type == "Shot": # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue item_name = "_".join(item_data["parents"] + [item_doc["name"]]) From 2bf3cf1b3094bb7402e7e3a8de55267fd4440edf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 19 Jul 2022 14:54:36 +0200 Subject: [PATCH 427/785] Fix wrong name for sequence --- openpype/modules/kitsu/utils/update_op_with_zou.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 9a5dd7db61..b2f0caf52b 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -221,13 +221,12 @@ def update_op_assets( parent_entity = parent_doc["data"]["zou"] parent_zou_id = parent_entity["parent_id"] - # Item name - if item_type == "Asset": - item_name = item_doc["name"] - elif item_type == "Shot": + if item_type in ["Shot", "Sequence"]: # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue item_name = "_".join(item_data["parents"] + [item_doc["name"]]) + else: + item_name = item_doc["name"] # Set root folders parents item_data["parents"] = entity_parent_folders + item_data["parents"] From c9ad287c7b4521da8c56ccf4d197c3e3befed61f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 16:40:14 +0200 Subject: [PATCH 428/785] trayp: fix import after develop merge --- openpype/hosts/traypublisher/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 0683b149ec..a0c42a55b1 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,5 +1,5 @@ from openpype.lib.attribute_definitions import FileDef -from openpype.pipeline import ( +from openpype.pipeline.create import ( Creator, HiddenCreator, CreatedInstance From ec7e441cea078bdeccf448b69855fd810a627d0c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 17:06:27 +0200 Subject: [PATCH 429/785] trayp: changing extension propagation --- .../defaults/project_settings/traypublisher.json | 14 +++----------- .../schema_project_traypublisher.json | 12 ++++++++---- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index c360dc2a13..2cb7d358ed 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -276,27 +276,19 @@ "family": "review", "variant": "Reference", "review": true, - "filter_ext": [ - "mov", - "mp4" - ] + "output_file_type": ".mp4" }, { "family": "plate", "variant": "", "review": false, - "filter_ext": [ - "mov", - "mp4" - ] + "output_file_type": ".mov" }, { "family": "audio", "variant": "", "review": false, - "filter_ext": [ - "wav" - ] + "output_file_type": ".wav" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index f77d5fbe06..7c61aeed50 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -256,10 +256,14 @@ "default": true }, { - "type": "list", - "key": "filter_ext", - "label": "Allowed input file types", - "object_type": "text" + "type": "enum", + "key": "output_file_type", + "label": "Integrating file type", + "enum_items": [ + {".mp4": "MP4"}, + {".mov": "MOV"}, + {".wav": "WAV"} + ] } ] } From 3845c90f95f073657f3a07b0d5df7ebf4e99e8c7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 17:06:56 +0200 Subject: [PATCH 430/785] trayp: solving an issue with ocio media source --- .../plugins/create/create_editorial.py | 139 ++++++++++++++---- 1 file changed, 114 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index b87253a705..28e58804c7 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -17,6 +17,8 @@ from openpype.hosts.traypublisher.api.editorial import ( from openpype.pipeline import CreatedInstance from openpype.lib import ( + get_ffprobe_data, + FileDef, TextDef, NumberDef, @@ -212,9 +214,16 @@ or updating already created. Publishing will create OTIO file. 
"fps": fps }) + # get path of sequence + sequence_path_data = pre_create_data["sequence_filepath_data"] + media_path_data = pre_create_data["media_filepaths_data"] + + sequence_path = self._get_path_from_file_data(sequence_path_data) + media_path = self._get_path_from_file_data(media_path_data) + # get otio timeline - otio_timeline = self._create_otio_instance( - subset_name, instance_data, pre_create_data) + otio_timeline = self._create_otio_timeline( + sequence_path, fps) # Create all clip instances clip_instance_properties.update({ @@ -222,43 +231,52 @@ or updating already created. Publishing will create OTIO file. "parent_asset_name": asset_name, "variant": instance_data["variant"] }) + + # create clip instances self._get_clip_instances( otio_timeline, + media_path, clip_instance_properties, family_presets=allowed_family_presets ) - def _create_otio_instance(self, subset_name, data, pre_create_data): - # get path of sequence - file_path_data = pre_create_data["sequence_filepath_data"] - media_path_data = pre_create_data["media_filepaths_data"] - - file_path = self._get_path_from_file_data(file_path_data) - media_path = self._get_path_from_file_data(media_path_data) - - # get editorial sequence file into otio timeline object - extension = os.path.splitext(file_path)[1] - kwargs = {} - if extension == ".edl": - # EDL has no frame rate embedded so needs explicit - # frame rate else 24 is asssumed. - kwargs["rate"] = data["fps"] - - self.log.info(f"kwargs: {kwargs}") - otio_timeline = otio.adapters.read_from_file( - file_path, **kwargs) + # create otio editorial instance + self._create_otio_instance( + subset_name, instance_data, + sequence_path, media_path, + otio_timeline + ) + def _create_otio_instance( + self, + subset_name, + data, + sequence_path, + media_path, + otio_timeline + ): # Pass precreate data to creator attributes data.update({ - "sequenceFilePath": file_path, + "sequenceFilePath": sequence_path, "editorialSourcePath": media_path, "otioTimeline": otio.adapters.write_to_string(otio_timeline) }) self._create_instance(self.family, subset_name, data) - return otio_timeline + def _create_otio_timeline(self, sequence_path, fps): + # get editorial sequence file into otio timeline object + extension = os.path.splitext(sequence_path)[1] + + kwargs = {} + if extension == ".edl": + # EDL has no frame rate embedded so needs explicit + # frame rate else 24 is asssumed. + kwargs["rate"] = fps + + self.log.info(f"kwargs: {kwargs}") + return otio.adapters.read_from_file(sequence_path, **kwargs) def _get_path_from_file_data(self, file_path_data): # TODO: just temporarly solving only one media file @@ -275,6 +293,7 @@ or updating already created. Publishing will create OTIO file. def _get_clip_instances( self, otio_timeline, + media_path, clip_instance_properties, family_presets ): @@ -284,6 +303,9 @@ or updating already created. Publishing will create OTIO file. descended_from_type=otio.schema.Track ) + # media data for audio sream and reference solving + media_data = self._get_media_source_metadata(media_path) + for track in tracks: self.log.debug(f"track.name: {track.name}") try: @@ -298,10 +320,15 @@ or updating already created. Publishing will create OTIO file. 
self.log.debug(f"track_start_frame: {track_start_frame}") for clip in track.each_child(): - if not self._validate_clip_for_processing(clip): continue + # get available frames info to clip data + self._create_otio_reference(clip, media_path, media_data) + + # convert timeline range to source range + self._restore_otio_source_range(clip) + base_instance_data = self._get_base_instance_data( clip, clip_instance_properties, @@ -326,6 +353,68 @@ or updating already created. Publishing will create OTIO file. ) self.log.debug(f"{pformat(dict(instance.data))}") + def _restore_otio_source_range(self, otio_clip): + otio_clip.source_range = otio_clip.range_in_parent() + + def _create_otio_reference( + self, + otio_clip, + media_path, + media_data + ): + start_frame = media_data["start_frame"] + frame_duration = media_data["duration"] + fps = media_data["fps"] + + available_range = otio.opentime.TimeRange( + start_time=otio.opentime.RationalTime( + start_frame, fps), + duration=otio.opentime.RationalTime( + frame_duration, fps) + ) + # in case old OTIO or video file create `ExternalReference` + media_reference = otio.schema.ExternalReference( + target_url=media_path, + available_range=available_range + ) + + otio_clip.media_reference = media_reference + + def _get_media_source_metadata(self, full_input_path_single_file): + return_data = {} + + try: + media_data = get_ffprobe_data( + full_input_path_single_file, self.log + ) + self.log.debug(f"__ media_data: {pformat(media_data)}") + + # get video stream data + video_stream = media_data["streams"][0] + return_data = { + "video": True, + "start_frame": 0, + "duration": int(video_stream["nb_frames"]), + "fps": float(video_stream["r_frame_rate"][:-2]) + } + + # get audio streams data + audio_stream = [ + stream for stream in media_data["streams"] + if stream["codec_type"] == "audio" + ] + + if audio_stream: + return_data["audio"] = True + + except Exception as exc: + raise AssertionError(( + "FFprobe couldn't read information about input file: " + f"\"{full_input_path_single_file}\". Error message: {exc}" + )) + + return return_data + def _make_subset_instance( self, clip, @@ -355,7 +444,7 @@ or updating already created. Publishing will create OTIO file. 
else: # add review family if defined future_instance_data.update({ - "filterExt": _fpreset["filter_ext"], + "outputFileType": _fpreset["output_file_type"], "parent_instance_id": parenting_data["instance_id"], "creator_attributes": { "parent_instance": parenting_data["instance_label"] From 968cbe8b984304769be9730dd3cff2633db00a7e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 17:07:18 +0200 Subject: [PATCH 431/785] removing plugin which will not be needed --- .../publish/collect_editorial_resources.py | 271 ------------------ 1 file changed, 271 deletions(-) delete mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py deleted file mode 100644 index 33a852e7a5..0000000000 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_resources.py +++ /dev/null @@ -1,271 +0,0 @@ -import os -import re -import tempfile -import pyblish.api -from copy import deepcopy -import clique - - -class CollectInstanceResources(pyblish.api.InstancePlugin): - """Collect instance's resources""" - - # must be after `CollectInstances` - order = pyblish.api.CollectorOrder - label = "Collect Editorial Resources" - hosts = ["standalonepublisher"] - families = ["clip"] - - def process(self, instance): - self.context = instance.context - self.log.info(f"Processing instance: {instance}") - self.new_instances = [] - subset_files = dict() - subset_dirs = list() - anatomy = self.context.data["anatomy"] - anatomy_data = deepcopy(self.context.data["anatomyData"]) - anatomy_data.update({"root": anatomy.roots}) - - subset = instance.data["subset"] - clip_name = instance.data["clipName"] - - editorial_source_root = instance.data["editorialSourceRoot"] - editorial_source_path = instance.data["editorialSourcePath"] - - # if `editorial_source_path` then loop through - if editorial_source_path: - # add family if mov or mp4 found which is longer for - # cutting `trimming` to enable `ExtractTrimmingVideoAudio` plugin - staging_dir = os.path.normpath( - tempfile.mkdtemp(prefix="pyblish_tmp_") - ) - instance.data["stagingDir"] = staging_dir - instance.data["families"] += ["trimming"] - return - - # if template pattern in path then fill it with `anatomy_data` - if "{" in editorial_source_root: - editorial_source_root = editorial_source_root.format( - **anatomy_data) - - self.log.debug(f"root: {editorial_source_root}") - # loop `editorial_source_root` and find clip name in folders - # and look for any subset name alternatives - for root, dirs, _files in os.walk(editorial_source_root): - # search only for directories related to clip name - correct_clip_dir = None - for _d_search in dirs: - # avoid all non clip dirs - if _d_search not in clip_name: - continue - # found correct dir for clip - correct_clip_dir = _d_search - - # continue if clip dir was not found - if not correct_clip_dir: - continue - - clip_dir_path = os.path.join(root, correct_clip_dir) - subset_files_items = list() - # list content of clip dir and search for subset items - for subset_item in os.listdir(clip_dir_path): - # avoid all items which are not defined as subsets by name - if subset not in subset_item: - continue - - subset_item_path = os.path.join( - clip_dir_path, subset_item) - # if it is dir store it to `subset_dirs` list - if os.path.isdir(subset_item_path): - subset_dirs.append(subset_item_path) - - # if it is file then store it to 
`subset_files` list - if os.path.isfile(subset_item_path): - subset_files_items.append(subset_item_path) - - if subset_files_items: - subset_files.update({clip_dir_path: subset_files_items}) - - # break the loop if correct_clip_dir was captured - # no need to cary on if correct folder was found - if correct_clip_dir: - break - - if subset_dirs: - # look all dirs and check for subset name alternatives - for _dir in subset_dirs: - instance_data = deepcopy( - {k: v for k, v in instance.data.items()}) - sub_dir = os.path.basename(_dir) - # if subset name is only alternative then create new instance - if sub_dir != subset: - instance_data = self.duplicate_instance( - instance_data, subset, sub_dir) - - # create all representations - self.create_representations( - os.listdir(_dir), instance_data, _dir) - - if sub_dir == subset: - self.new_instances.append(instance_data) - # instance.data.update(instance_data) - - if subset_files: - unique_subset_names = list() - root_dir = list(subset_files.keys()).pop() - files_list = subset_files[root_dir] - search_pattern = f"({subset}[A-Za-z0-9]+)(?=[\\._\\s])" - for _file in files_list: - pattern = re.compile(search_pattern) - match = pattern.findall(_file) - if not match: - continue - match_subset = match.pop() - if match_subset in unique_subset_names: - continue - unique_subset_names.append(match_subset) - - self.log.debug(f"unique_subset_names: {unique_subset_names}") - - for _un_subs in unique_subset_names: - instance_data = self.duplicate_instance( - instance.data, subset, _un_subs) - - # create all representations - self.create_representations( - [os.path.basename(f) for f in files_list - if _un_subs in f], - instance_data, root_dir) - - # remove the original instance as it had been used only - # as template and is duplicated - self.context.remove(instance) - - # create all instances in self.new_instances into context - for new_instance in self.new_instances: - _new_instance = self.context.create_instance( - new_instance["name"]) - _new_instance.data.update(new_instance) - - def duplicate_instance(self, instance_data, subset, new_subset): - - new_instance_data = dict() - for _key, _value in instance_data.items(): - new_instance_data[_key] = _value - if not isinstance(_value, str): - continue - if subset in _value: - new_instance_data[_key] = _value.replace( - subset, new_subset) - - self.log.info(f"Creating new instance: {new_instance_data['name']}") - self.new_instances.append(new_instance_data) - return new_instance_data - - def create_representations( - self, files_list, instance_data, staging_dir): - """ Create representations from Collection object - """ - # collecting frames for later frame start/end reset - frames = list() - # break down Collection object to collections and reminders - collections, remainder = clique.assemble(files_list) - # add staging_dir to instance_data - instance_data["stagingDir"] = staging_dir - # add representations to instance_data - instance_data["representations"] = list() - - collection_head_name = None - # loop through collections and create representations - for _collection in collections: - ext = _collection.tail[1:] - collection_head_name = _collection.head - frame_start = list(_collection.indexes)[0] - frame_end = list(_collection.indexes)[-1] - repre_data = { - "frameStart": frame_start, - "frameEnd": frame_end, - "name": ext, - "ext": ext, - "files": [item for item in _collection], - "stagingDir": staging_dir - } - - if instance_data.get("keepSequence"): - repre_data_keep = deepcopy(repre_data) - 
instance_data["representations"].append(repre_data_keep) - - if "review" in instance_data["families"]: - repre_data.update({ - "thumbnail": True, - "frameStartFtrack": frame_start, - "frameEndFtrack": frame_end, - "step": 1, - "fps": self.context.data.get("fps"), - "name": "review", - "tags": ["review", "ftrackreview", "delete"], - }) - instance_data["representations"].append(repre_data) - - # add to frames for frame range reset - frames.append(frame_start) - frames.append(frame_end) - - # loop through reminders and create representations - for _reminding_file in remainder: - ext = os.path.splitext(_reminding_file)[-1][1:] - if ext not in instance_data["extensions"]: - continue - if collection_head_name and ( - (collection_head_name + ext) not in _reminding_file - ) and (ext in ["mp4", "mov"]): - self.log.info(f"Skipping file: {_reminding_file}") - continue - frame_start = 1 - frame_end = 1 - - repre_data = { - "name": ext, - "ext": ext, - "files": _reminding_file, - "stagingDir": staging_dir - } - - # exception for thumbnail - if "thumb" in _reminding_file: - repre_data.update({ - 'name': "thumbnail", - 'thumbnail': True - }) - - # exception for mp4 preview - if ext in ["mp4", "mov"]: - frame_start = 0 - frame_end = ( - (instance_data["frameEnd"] - instance_data["frameStart"]) - + 1) - # add review ftrack family into families - for _family in ["review", "ftrack"]: - if _family not in instance_data["families"]: - instance_data["families"].append(_family) - repre_data.update({ - "frameStart": frame_start, - "frameEnd": frame_end, - "frameStartFtrack": frame_start, - "frameEndFtrack": frame_end, - "step": 1, - "fps": self.context.data.get("fps"), - "name": "review", - "thumbnail": True, - "tags": ["review", "ftrackreview", "delete"], - }) - - # add to frames for frame range reset only if no collection - if not collections: - frames.append(frame_start) - frames.append(frame_end) - - instance_data["representations"].append(repre_data) - - # reset frame start / end - instance_data["frameStart"] = min(frames) - instance_data["frameEnd"] = max(frames) From eff02322efb897bb4649130b011dd2ad46a9bb87 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 17:08:16 +0200 Subject: [PATCH 432/785] general: adding traypublisher host --- openpype/plugins/publish/collect_otio_frame_ranges.py | 2 +- openpype/plugins/publish/collect_otio_subset_resources.py | 8 +++----- openpype/plugins/publish/extract_otio_trimming_video.py | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index c86e777850..40e89e29bc 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -23,7 +23,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): label = "Collect OTIO Frame Ranges" order = pyblish.api.CollectorOrder - 0.08 families = ["shot", "clip"] - hosts = ["resolve", "hiero", "flame"] + hosts = ["resolve", "hiero", "flame", "traypublisher"] def process(self, instance): # get basic variables diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index fc6a9b50f2..ca29b82f4e 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -23,7 +23,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): label = "Collect OTIO Subset Resources" order = 
pyblish.api.CollectorOrder - 0.077 families = ["clip"] - hosts = ["resolve", "hiero", "flame"] + hosts = ["resolve", "hiero", "flame", "traypublisher"] def process(self, instance): @@ -198,7 +198,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): if kwargs.get("collection"): collection = kwargs.get("collection") - files = [f for f in collection] + files = list(collection) ext = collection.format("{tail}") representation_data.update({ "name": ext[1:], @@ -220,7 +220,5 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): }) if kwargs.get("trim") is True: - representation_data.update({ - "tags": ["trim"] - }) + representation_data["tags"] = ["trim"] return representation_data diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py index 19625fa568..46a4056a9d 100644 --- a/openpype/plugins/publish/extract_otio_trimming_video.py +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -20,7 +20,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): order = api.ExtractorOrder label = "Extract OTIO trim longer video" families = ["trim"] - hosts = ["resolve", "hiero", "flame"] + hosts = ["resolve", "hiero", "flame", "traypublisher"] def process(self, instance): self.staging_dir = self.staging_dir(instance) From 4bd7d4f43e5fa0ba1a3e7db06867ddd28c52fcd1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 21:50:33 +0200 Subject: [PATCH 433/785] trayp: adding reivew toggle to instance also add audio family condition for available ffmpeg streams --- .../plugins/create/create_editorial.py | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 28e58804c7..55c4ca76b7 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -91,6 +91,15 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): return new_instance + def get_instance_attr_defs(self): + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] + class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): identifier = "editorial_shot" @@ -114,7 +123,6 @@ class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): attr_defs.extend(CLIP_ATTR_DEFS) return attr_defs - class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): identifier = "editorial_plate" family = "plate" @@ -345,6 +353,13 @@ or updating already created. Publishing will create OTIO file. )) for _fpreset in family_presets: + # exclude audio family if no audio stream + if ( + _fpreset["family"] == "audio" + and not media_data.get("audio") + ): + continue + instance = self._make_subset_instance( clip, _fpreset, @@ -447,12 +462,8 @@ or updating already created. Publishing will create OTIO file. 
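The audio-family exclusion above keys off the `audio` flag that `_get_media_source_metadata` derives from ffprobe output. A rough standalone equivalent of that check, as a hedged sketch using the ffprobe CLI directly rather than OpenPype's `get_ffprobe_data` helper:

    import json
    import subprocess

    def has_audio_stream(path):
        """Return True if ffprobe reports at least one audio stream."""
        out = subprocess.check_output([
            "ffprobe", "-v", "error",
            "-show_streams", "-print_format", "json",
            path,
        ])
        streams = json.loads(out).get("streams", [])
        return any(s.get("codec_type") == "audio" for s in streams)
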
"outputFileType": _fpreset["output_file_type"], "parent_instance_id": parenting_data["instance_id"], "creator_attributes": { - "parent_instance": parenting_data["instance_label"] - }, - "publish_attributes": { - "CollectReviewFamily": { - "add_review_family": _fpreset.get("review") - } + "parent_instance": parenting_data["instance_label"], + "add_review_family": _fpreset.get("review") } }) From 49f67f0aca708d0b60055ccefadd414734159c56 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 21:51:09 +0200 Subject: [PATCH 434/785] trayp: collect reviewable for editorial --- .../publish/collect_editorial_reviewable.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py new file mode 100644 index 0000000000..6cd8c42546 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py @@ -0,0 +1,30 @@ +import os + +import pyblish.api + + +class CollectEditorialReviewable(pyblish.api.InstancePlugin): + """Collect reviwiewable toggle to instance and representation data + """ + + label = "Collect Editorial Reviewable" + order = pyblish.api.CollectorOrder + + families = ["plate", "review", "audio"] + hosts = ["traypublisher"] + + def process(self, instance): + creator_identifier = instance.data["creator_identifier"] + if "editorial" not in creator_identifier: + return + + creator_attributes = instance.data["creator_attributes"] + repre = instance.data["representations"][0] + + if creator_attributes["add_review_family"]: + repre["tags"].append("review") + instance.data["families"].append("review") + + instance.data["representations"] = [repre] + + self.log.debug("instance.data {}".format(instance.data)) From 0c95e86ccc2735c25d3a5d9bcd31a62083a7ce67 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Jul 2022 21:51:40 +0200 Subject: [PATCH 435/785] trayp: add more keys to sync between editorial instances --- .../traypublisher/plugins/publish/collect_shot_instances.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index 5abafa498d..86505f76c5 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -15,12 +15,16 @@ class CollectShotInstance(pyblish.api.InstancePlugin): SHARED_KEYS = [ "asset", "fps", + "handleStart", + "handleEnd", "frameStart", "frameEnd", "clipIn", "clipOut", "sourceIn", - "sourceOut" + "sourceOut", + "otioClip", + "workfileFrameStart" ] def process(self, instance): From 5940fd0937941ba0fea391be0889bfa86bedc806 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 20 Jul 2022 03:58:48 +0000 Subject: [PATCH 436/785] [Automated] Bump version --- CHANGELOG.md | 15 ++++++++------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 95427e9ea9..e8da885473 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,23 +1,29 @@ # Changelog -## [3.12.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) **🚀 Enhancements** +- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) - Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) - Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) - Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) - Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) **🐛 Bug fixes** +- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) +- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) - NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) @@ -25,6 +31,7 @@ **🔀 Refactored code** +- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) - TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) @@ -51,7 +58,6 @@ - Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) - Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) - Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) -- Maya: Add additional playblast options to review Extractor. 
[\#3384](https://github.com/pypeclub/OpenPype/pull/3384) **🐛 Bug fixes** @@ -71,7 +77,6 @@ - Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) - LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) - Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) -- Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) **🔀 Refactored code** @@ -85,8 +90,6 @@ - General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) - General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) - General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) -- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) -- Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) @@ -111,7 +114,6 @@ - Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) - General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) - Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) -- Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) **🔀 Refactored code** @@ -121,7 +123,6 @@ - Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) - Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) - Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) -- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) diff --git a/openpype/version.py b/openpype/version.py index e9206379e1..dd5ad97449 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2-nightly.1" +__version__ = "3.12.2-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 19d65b50f9..9552242694 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2-nightly.1" # OpenPype +version = "3.12.2-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From fca8030092e9e9fd165d2bdf2039b662be218499 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 09:47:54 +0200 Subject: [PATCH 437/785] normalize path from get_workdir where needed --- .../hosts/flame/plugins/publish/integrate_batch_group.py | 6 ++++-- openpype/lib/applications.py | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index da9553cc2a..032de99540 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -324,5 +324,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] - return get_workdir( - project_doc, asset_entity, task_data["name"], "flame") + workdir = get_workdir( + project_doc, asset_entity, task_data["name"], "flame" + ) + return os.path.normpath(workdir) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index f46197e15f..dafc3b479b 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1638,6 +1638,7 @@ def prepare_context_environments(data, env_group=None): "Error in anatomy.format: {}".format(str(exc)) ) + workdir = os.path.normpath(workdir) if not os.path.exists(workdir): log.debug( "Creating workdir folder: \"{}\"".format(workdir) From 37a02fe8e19e4faeaa2a63e3cced19efe5aa1fce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 09:48:21 +0200 Subject: [PATCH 438/785] 'get_workdir_with_workdir_data' returns 'TemplateResult' --- openpype/lib/avalon_context.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 76ed6cbbd3..605ebb0b99 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -582,10 +582,7 @@ def get_workdir_with_workdir_data( anatomy_filled = anatomy.format(workdir_data) # Output is TemplateResult object which contain useful data - path = anatomy_filled[template_key]["folder"] - if path: - path = os.path.normpath(path) - return path + return anatomy_filled[template_key]["folder"] def get_workdir( From afd26a82e376a6445177d63d7769394c22b5efa6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 09:48:42 +0200 Subject: [PATCH 439/785] pass anatomy to 'get_workdir' in flame integrator --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 032de99540..bf6e81523d 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -323,8 +323,9 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): def _get_shot_task_dir_path(self, instance, task_data): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] + anatomy = instance.context.data["anatomy"] workdir = get_workdir( - project_doc, asset_entity, task_data["name"], "flame" + project_doc, asset_entity, task_data["name"], "flame", anatomy ) return os.path.normpath(workdir) From 5963066a7e1ba2849092215697470e7e0ae734bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 10:11:56 +0200 Subject: 
[PATCH 440/785] TemplateResult has 'normalized' method --- openpype/lib/path_templates.py | 13 +++++++++++++ openpype/pipeline/anatomy.py | 13 +++++++++++++ 2 files changed, 26 insertions(+) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index 5c40aa4549..c1282016ef 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -409,6 +409,19 @@ class TemplateResult(str): self.invalid_types ) + def normalized(self): + """Convert to normalized path.""" + + cls = self.__class__ + return cls( + os.path.normpath(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + class TemplatesResultDict(dict): """Holds and wrap TemplateResults for easy bug report.""" diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 73081f18fb..08db4749b3 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -380,6 +380,19 @@ class AnatomyTemplateResult(TemplateResult): ) return self.__class__(tmp, self.rootless) + def normalized(self): + """Convert to normalized path.""" + + tmp = TemplateResult( + os.path.normpath(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + return self.__class__(tmp, self.rootless) + class AnatomyTemplates(TemplatesDict): inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})") From ab34b6f1cf1b1e56b770dcac4191ca69c3a58a5b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 10:12:56 +0200 Subject: [PATCH 441/785] use normalized 'TemplateResult' output in 'get_workdir_with_workdir_data' --- .../hosts/flame/plugins/publish/integrate_batch_group.py | 3 +-- openpype/lib/applications.py | 1 - openpype/lib/avalon_context.py | 5 ++++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index bf6e81523d..b59107f155 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -325,7 +325,6 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): asset_entity = instance.data["assetEntity"] anatomy = instance.context.data["anatomy"] - workdir = get_workdir( + return get_workdir( project_doc, asset_entity, task_data["name"], "flame", anatomy ) - return os.path.normpath(workdir) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index dafc3b479b..f46197e15f 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1638,7 +1638,6 @@ def prepare_context_environments(data, env_group=None): "Error in anatomy.format: {}".format(str(exc)) ) - workdir = os.path.normpath(workdir) if not os.path.exists(workdir): log.debug( "Creating workdir folder: \"{}\"".format(workdir) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 605ebb0b99..2944b2506e 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -582,7 +582,10 @@ def get_workdir_with_workdir_data( anatomy_filled = anatomy.format(workdir_data) # Output is TemplateResult object which contain useful data - return anatomy_filled[template_key]["folder"] + output = anatomy_filled[template_key]["folder"] + if output: + return output.normalized() + return output def get_workdir( From 6ed18c6250e083a827be395cfc9746ffe7bdd2fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:13:23 +0200 Subject: [PATCH 442/785] 
stop context timer when context is set --- openpype/tools/workfiles/window.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index c1efe026f2..f5edff2fa5 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -331,6 +331,7 @@ class Window(QtWidgets.QMainWindow): if self.assets_widget.refreshing: return + self._set_context_timer.stop() self._context_to_set, context = None, self._context_to_set if "asset" in context: asset_doc = get_asset_by_name( From 58611f89bda27a745defb77a54fe27ff3555ffb7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:15:12 +0200 Subject: [PATCH 443/785] global function 'show_workfiles' just pass args and kwargs --- openpype/tools/utils/host_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index ae23e4d089..41c4478d57 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -395,9 +395,9 @@ def show_tool_by_name(tool_name, parent=None, *args, **kwargs): _SingletonPoint.show_tool_by_name(tool_name, parent, *args, **kwargs) -def show_workfiles(parent=None, use_context=None, save=None): +def show_workfiles(*args, **kwargs): _SingletonPoint.show_tool_by_name( - "workfiles", parent, use_context=use_context, save=save + "workfiles", *args, **kwargs ) From 8e4627b44dd6e1a667b742415c6d00946dba3220 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:15:37 +0200 Subject: [PATCH 444/785] set icon of workfiles tool --- openpype/tools/workfiles/window.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index f5edff2fa5..e86b716765 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -1,6 +1,6 @@ import os import datetime -from Qt import QtCore, QtWidgets +from Qt import QtCore, QtWidgets, QtGui from openpype.client import ( get_asset_by_id, @@ -8,6 +8,7 @@ from openpype.client import ( get_workfile_info, ) from openpype import style +from openpype import resources from openpype.lib import ( create_workfile_doc, save_workfile_data_to_doc, @@ -153,6 +154,8 @@ class Window(QtWidgets.QMainWindow): if not parent: window_flags |= QtCore.Qt.WindowStaysOnTopHint self.setWindowFlags(window_flags) + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) # Create pages widget and set it as central widget pages_widget = QtWidgets.QStackedWidget(self) From 55ff302d21a49b13281972129a944adcb998bf89 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:15:58 +0200 Subject: [PATCH 445/785] changed workfiles window to QWidget --- openpype/tools/workfiles/window.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index e86b716765..588daf069f 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -143,23 +143,19 @@ class SidePanelWidget(QtWidgets.QWidget): return self._workfile_doc, data -class Window(QtWidgets.QMainWindow): +class Window(QtWidgets.QWidget): """Work Files Window""" title = "Work Files" def __init__(self, parent=None): super(Window, self).__init__(parent=parent) self.setWindowTitle(self.title) - window_flags = QtCore.Qt.Window | QtCore.Qt.WindowCloseButtonHint - if not parent: - window_flags |= 
QtCore.Qt.WindowStaysOnTopHint - self.setWindowFlags(window_flags) icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) self.setWindowIcon(icon) + self.setWindowFlags(self.windowFlags() | QtCore.Qt.Window) # Create pages widget and set it as central widget pages_widget = QtWidgets.QStackedWidget(self) - self.setCentralWidget(pages_widget) home_page_widget = QtWidgets.QWidget(pages_widget) home_body_widget = QtWidgets.QWidget(home_page_widget) @@ -194,6 +190,9 @@ class Window(QtWidgets.QMainWindow): # the files widget has a filter field which tasks does not. tasks_widget.setContentsMargins(0, 32, 0, 0) + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.addWidget(pages_widget, 1) + # Set context after asset widget is refreshed # - to do so it is necessary to wait until refresh is done set_context_timer = QtCore.QTimer() From 3fac738361abb9446424fcb244d9768c2706f070 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:16:15 +0200 Subject: [PATCH 446/785] show workfile just calls ensure visible on workfiles tool --- openpype/tools/utils/host_tools.py | 25 +++-------------- openpype/tools/workfiles/window.py | 43 ++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 21 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 41c4478d57..52d15a59f7 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -60,31 +60,14 @@ class HostToolsHelper: return self._workfiles_tool - def show_workfiles(self, parent=None, use_context=None, save=None): + def show_workfiles( + self, parent=None, use_context=None, save=None, on_top=None + ): """Workfiles tool for changing context and saving workfiles.""" - if use_context is None: - use_context = True - - if save is None: - save = True with qt_app_context(): workfiles_tool = self.get_workfiles_tool(parent) - workfiles_tool.set_save_enabled(save) - - if not workfiles_tool.isVisible(): - workfiles_tool.show() - - if use_context: - context = { - "asset": legacy_io.Session["AVALON_ASSET"], - "task": legacy_io.Session["AVALON_TASK"] - } - workfiles_tool.set_context(context) - - # Pull window to the front. 
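The window-flag juggling in these workfiles patches comes down to toggling Qt's stay-on-top hint on a widget that already exists. A minimal sketch of that pattern (illustrative helper, not part of the patch):

    from Qt import QtCore

    def set_on_top(widget, on_top):
        flags = widget.windowFlags()
        if on_top:
            flags |= QtCore.Qt.WindowStaysOnTopHint
        else:
            flags &= ~QtCore.Qt.WindowStaysOnTopHint
        if flags != widget.windowFlags():
            # Changing flags on a visible widget hides it on most
            # platforms, so it has to be shown again afterwards.
            widget.setWindowFlags(flags)
            widget.show()

That re-show requirement is also why `ensure_visible` below calls `self.show()` right after `setWindowFlags`.
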
- workfiles_tool.raise_() - workfiles_tool.activateWindow() + workfiles_tool.ensure_visible(use_context, save, on_top) def get_loader_tool(self, parent): """Create, cache and return loader tool window.""" diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 588daf069f..0b0d67e589 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -229,6 +229,49 @@ class Window(QtWidgets.QWidget): self._first_show = True self._context_to_set = None + def ensure_visible( + self, use_context=None, save=None, on_top=None + ): + if save is None: + save = True + + self.set_save_enabled(save) + + if self.isVisible(): + use_context = False + elif use_context is None: + use_context = True + + if on_top is None and self._first_show: + on_top = self.parent() is None + + window_flags = self.windowFlags() + new_window_flags = window_flags + if on_top is True: + new_window_flags = window_flags | QtCore.Qt.WindowStaysOnTopHint + elif on_top is False: + new_window_flags = window_flags & ~QtCore.Qt.WindowStaysOnTopHint + + if new_window_flags != window_flags: + # Note this is not propagated after initialization of widget in + # some Qt builds + self.setWindowFlags(new_window_flags) + self.show() + + elif not self.isVisible(): + self.show() + + if use_context is None or use_context is True: + context = { + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] + } + self.set_context(context) + + # Pull window to the front. + self.raise_() + self.activateWindow() + @property def project_name(self): return legacy_io.Session["AVALON_PROJECT"] From b128e98c8f6269a779ec87a66d35cf8b4d8bf6d1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:16:57 +0200 Subject: [PATCH 447/785] nuke does not pass parent widget for workfiles tool and tell it to change on top flag --- openpype/hosts/nuke/api/lib.py | 10 ++++++---- openpype/hosts/nuke/api/pipeline.py | 10 +++++++++- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 0929415c00..9b24c9fb38 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2440,10 +2440,12 @@ def _launch_workfile_app(): if starting_up or closing_down: return - from .pipeline import get_main_window - - main_window = get_main_window() - host_tools.show_workfiles(parent=main_window) + # Make sure on top is enabled on first show so the window is not hidden + # under main nuke window + # - this happened on Centos 7 and it is because the focus of nuke + # changes to the main window after showing because of initialization + # which moves workfiles tool under it + host_tools.show_workfiles(parent=None, on_top=True) def process_workfile_builder(): diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 2e3621ba8f..0afc56d2f7 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -142,6 +142,14 @@ def uninstall(): _uninstall_menu() +def _show_workfiles(): + # Make sure parent is not set + # - this makes Workfiles tool as separated window which + # avoid issues with reopening + # - it is possible to explicitly change on top flag of the tool + host_tools.show_workfiles(parent=None, on_top=False) + + def _install_menu(): # uninstall original avalon menu main_window = get_main_window() @@ -158,7 +166,7 @@ def _install_menu(): menu.addSeparator() menu.addCommand( "Work Files...", - lambda: 
host_tools.show_workfiles(parent=main_window) + _show_workfiles ) menu.addSeparator() From 037ed71f60eba9d5947aef8c973e8b791596b942 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 13:52:15 +0200 Subject: [PATCH 448/785] keep subset group template settings but mark them as deprecated with hint where to move the value --- .../defaults/project_settings/global.json | 9 ++++ .../schemas/schema_global_publish.json | 46 +++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9247c6ceb6..e509db2791 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -171,6 +171,15 @@ ] }, "IntegrateAssetNew": { + "subset_grouping_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template": "" + } + ], "template_name_profiles": [ { "families": [], diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index af08bbec3c..b9d0b7daba 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -587,6 +587,52 @@ "label": "IntegrateAsset (Legacy)", "is_group": true, "children": [ + { + "type": "label", + "label": "NOTE: Subset grouping profiles settings were moved to
Integrate Subset Group. Please move values there." + }, + { + "type": "list", + "key": "subset_grouping_profiles", + "label": "Subset grouping profiles (DEPRECATED)", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template", + "label": "Template" + } + ] + } + }, { "type": "list", "key": "template_name_profiles", From bedca7eaf98212d1a5450b097029d25dfdaf4d02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 20 Jul 2022 16:30:03 +0200 Subject: [PATCH 449/785] :wrench: add relevant Maya validators to Settings add missing validators and add ability to set them optional if needed --- .../validate_review_subset_uniqueness.py | 4 +- .../plugins/publish/validate_setdress_root.py | 3 +- .../defaults/project_settings/maya.json | 178 +++++++++++++++++- .../schemas/schema_maya_publish.json | 155 +++++++++++++++ 4 files changed, 329 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py index d70096ee45..04cc9ab5fb 100644 --- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py +++ b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py @@ -6,7 +6,7 @@ from openpype.pipeline import PublishXmlValidationError class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): - """Validates that nodes has common root.""" + """Validates that review subset has unique name.""" order = openpype.api.ValidateContentsOrder hosts = ["maya"] @@ -17,7 +17,7 @@ class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): subset_names = [] for instance in context: - self.log.info("instance:: {}".format(instance.data)) + self.log.debug("Instance: {}".format(instance.data)) if instance.data.get('publish'): subset_names.append(instance.data.get('subset')) diff --git a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py index 0b4842d208..8e23a7c04f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py @@ -4,8 +4,7 @@ import openpype.api class ValidateSetdressRoot(pyblish.api.InstancePlugin): - """ - """ + """Validate if set dress top root node is published.""" order = openpype.api.ValidateContentsOrder label = "SetDress Root" diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 5976c6a823..c96acbff6d 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -205,10 +205,15 @@ "enabled": true, "optional": true, "active": true, - "exclude_families": ["model", "rig", "staticMesh"] + "exclude_families": [ + "model", + "rig", + "staticMesh" + ] }, "ValidateShaderName": { "enabled": false, + "optional": true, "regex": "(?P.*)_(.*)_SHD" }, "ValidateShadingEngine": { @@ -222,6 +227,7 @@ }, "ValidateLoadedPlugin": { "enabled": false, + "optional": true, 
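As a side note on the `enabled` / `optional` / `active` triplet that repeats throughout these validator settings: the values end up as class attributes on the matching pyblish plugin. Roughly as sketched below, with a purely hypothetical validator; the exact wiring is OpenPype's settings-to-plugin mapping:

    import pyblish.api

    class ValidateExampleMesh(pyblish.api.InstancePlugin):
        """Hypothetical validator illustrating the settings keys."""
        order = pyblish.api.ValidatorOrder
        label = "Validate Example Mesh"
        hosts = ["maya"]
        families = ["model"]

        enabled = True    # False: the plugin is skipped entirely
        optional = True   # True: artists may toggle it in the publisher UI
        active = True     # default state of that toggle

        def process(self, instance):
            pass
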
"whitelist_native_plugins": false, "authorized_plugins": [] }, @@ -236,6 +242,7 @@ }, "ValidateUnrealStaticMeshName": { "enabled": true, + "optional": true, "validate_mesh": false, "validate_collision": true }, @@ -252,6 +259,81 @@ "redshift_render_attributes": [], "renderman_render_attributes": [] }, + "ValidateCurrentRenderLayerIsRenderable": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderImageRule": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderNoDefaultCameras": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderSingleCamera": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderLayerAOVs": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateStepSize": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVRayDistributedRendering": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVrayReferencedAOVs": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVRayTranslatorEnabled": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVrayProxy": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVrayProxyMembers": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRenderScriptCallbacks": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigCacheState": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigInputShapesInInstance": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigSettings": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateModelName": { "enabled": false, "database": true, @@ -270,6 +352,7 @@ }, "ValidateTransformNamingSuffix": { "enabled": true, + "optional": true, "SUFFIX_NAMING_TABLE": { "mesh": [ "_GEO", @@ -293,7 +376,7 @@ "ALLOW_IF_NOT_IN_SUFFIX_TABLE": true }, "ValidateColorSets": { - "enabled": false, + "enabled": true, "optional": true, "active": true }, @@ -337,6 +420,16 @@ "optional": true, "active": true }, + "ValidateMeshNoNegativeScale": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateMeshNonZeroEdgeLength": { + "enabled": true, + "optional": true, + "active": true + }, "ValidateMeshNormalsUnlocked": { "enabled": false, "optional": true, @@ -359,22 +452,22 @@ }, "ValidateNoNamespace": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNoNullTransforms": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNoUnknownNodes": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNodeNoGhosting": { "enabled": false, - "optional": true, + "optional": false, "active": true }, "ValidateShapeDefaultNames": { @@ -402,6 +495,21 @@ "optional": true, "active": true }, + "ValidateNoVRayMesh": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateUnrealMeshTriangulated": { + "enabled": false, + "optional": true, + "active": true + }, + "ValidateAlembicVisibleOnly": { + "enabled": true, + "optional": false, + "active": true + }, "ExtractAlembic": { "enabled": true, "families": [ @@ -425,8 +533,34 @@ "optional": true, "active": true }, + "ValidateAnimationContent": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateOutRelatedNodeIds": { + "enabled": true, + "optional": false, + "active": true + }, + 
"ValidateRigControllersArnoldAttributes": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateSkeletalMeshHierarchy": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateSkinclusterDeformerSet": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateRigOutSetNodeIds": { "enabled": true, + "optional": false, "allow_history_only": false }, "ValidateCameraAttributes": { @@ -439,14 +573,44 @@ "optional": true, "active": true }, + "ValidateAssemblyNamespaces": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateAssemblyModelTransforms": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateAssRelativePaths": { "enabled": true, + "optional": false, + "active": true + }, + "ValidateInstancerContent": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateInstancerFrameRanges": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateNoDefaultCameras": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateUnrealUpAxis": { + "enabled": false, "optional": true, "active": true }, "ValidateCameraContents": { "enabled": true, - "optional": true, + "optional": false, "validate_shapes": true }, "ExtractPlayblast": { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 84182973a1..53247f6bd4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -107,6 +107,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "label", "label": "Shader name regex can use named capture group asset to validate against current asset name.

Example:
^.*(?P=<asset>.+)_SHD

" @@ -159,6 +164,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "whitelist_native_plugins", @@ -246,6 +256,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "validate_mesh", @@ -332,6 +347,72 @@ } ] }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateCurrentRenderLayerIsRenderable", + "label": "Validate Current Render Layer Has Renderable Camera" + }, + { + "key": "ValidateRenderImageRule", + "label": "Validate Images File Rule (Workspace)" + }, + { + "key": "ValidateRenderNoDefaultCameras", + "label": "Validate No Default Cameras Renderable" + }, + { + "key": "ValidateRenderSingleCamera", + "label": "Validate Render Single Camera" + }, + { + "key": "ValidateRenderLayerAOVs", + "label": "Validate Render Passes / AOVs Are Registered" + }, + { + "key": "ValidateStepSize", + "label": "Validate Step Size" + }, + { + "key": "ValidateVRayDistributedRendering", + "label": "VRay Distributed Rendering" + }, + { + "key": "ValidateVrayReferencedAOVs", + "label": "VRay Referenced AOVs" + }, + { + "key": "ValidateVRayTranslatorEnabled", + "label": "VRay Translator Settings" + }, + { + "key": "ValidateVrayProxy", + "label": "VRay Proxy Settings" + }, + { + "key": "ValidateVrayProxyMembers", + "label": "VRay Proxy Members" + }, + { + "key": "ValidateYetiRenderScriptCallbacks", + "label": "Yeti Render Script Callbacks" + }, + { + "key": "ValidateYetiRigCacheState", + "label": "Yeti Rig Cache State" + }, + { + "key": "ValidateYetiRigInputShapesInInstance", + "label": "Yeti Rig Input Shapes In Instance" + }, + { + "key": "ValidateYetiRigSettings", + "label": "Yeti Rig Settings" + } + ] + }, { "type": "collapsible-wrap", "label": "Model", @@ -416,6 +497,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "label", "label": "Validates transform suffix based on the type of its children shapes." 
@@ -472,6 +558,14 @@ "key": "ValidateMeshNonManifold", "label": "ValidateMeshNonManifold" }, + { + "key": "ValidateMeshNoNegativeScale", + "label": "Validate Mesh No Negative Scale" + }, + { + "key": "ValidateMeshNonZeroEdgeLength", + "label": "Validate Mesh Edge Length Non Zero" + }, { "key": "ValidateMeshNormalsUnlocked", "label": "ValidateMeshNormalsUnlocked" @@ -525,6 +619,18 @@ { "key": "ValidateUniqueNames", "label": "ValidateUniqueNames" + }, + { + "key": "ValidateNoVRayMesh", + "label": "Validate No V-Ray Proxies (VRayMesh)" + }, + { + "key": "ValidateUnrealMeshTriangulated", + "label": "Validate if Mesh is Triangulated" + }, + { + "key": "ValidateAlembicVisibleOnly", + "label": "Validate Alembic visible node" } ] }, @@ -573,6 +679,26 @@ { "key": "ValidateRigControllers", "label": "Validate Rig Controllers" + }, + { + "key": "ValidateAnimationContent", + "label": "Validate Animation Content" + }, + { + "key": "ValidateOutRelatedNodeIds", + "label": "Validate Animation Out Set Related Node Ids" + }, + { + "key": "ValidateRigControllersArnoldAttributes", + "label": "Validate Rig Controllers (Arnold Attributes)" + }, + { + "key": "ValidateSkeletalMeshHierarchy", + "label": "Validate Skeletal Mesh Top Node" + }, + { + "key": "ValidateSkinclusterDeformerSet", + "label": "Validate Skincluster Deformer Relationships" } ] }, @@ -589,6 +715,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "allow_history_only", @@ -611,9 +742,33 @@ "key": "ValidateAssemblyName", "label": "Validate Assembly Name" }, + { + "key": "ValidateAssemblyNamespaces", + "label": "Validate Assembly Namespaces" + }, + { + "key": "ValidateAssemblyModelTransforms", + "label": "Validate Assembly Model Transforms" + }, { "key": "ValidateAssRelativePaths", "label": "ValidateAssRelativePaths" + }, + { + "key": "ValidateInstancerContent", + "label": "Validate Instancer Content" + }, + { + "key": "ValidateInstancerFrameRanges", + "label": "Validate Instancer Cache Frame Ranges" + }, + { + "key": "ValidateNoDefaultCameras", + "label": "Validate No Default Cameras" + }, + { + "key": "ValidateUnrealUpAxis", + "label": "Validate Unreal Up-Axis check" } ] }, From 59395883170cac8ec8c11ca7b66ccd40d499cbc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 20 Jul 2022 17:30:40 +0200 Subject: [PATCH 450/785] Change: Asset is put under an AssetType folder --- .../modules/kitsu/utils/update_op_with_zou.py | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 4695a49159..e0ff87adf7 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -14,7 +14,7 @@ from openpype.client import ( get_project, get_assets, get_asset_by_id, - get_asset_by_name + get_asset_by_name, ) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings @@ -154,17 +154,23 @@ def update_op_assets( parent_zou_id = substitute_parent_item["parent_id"] else: parent_zou_id = ( - item.get("parent_id") + # For Asset, put under asset type directory + item.get("entity_type_id") + if item_type == "Asset" + else None + # Else, fallback on usual hierarchy + or item.get("parent_id") or item.get("episode_id") or item.get("source_id") - ) # TODO check consistency + ) - # Substitute Episode and Sequence by Shot - 
substitute_item_type = ( - "shots" - if item_type in ["Episode", "Sequence"] - else f"{item_type.lower()}s" - ) + # Substitute item type for general classification (assets or shots) + if item_type in ["Asset", "AssetType"]: + substitute_item_type = "assets" + elif item_type in ["Episode", "Sequence"]: + substitute_item_type = "shots" + else: + substitute_item_type = f"{item_type.lower()}s" entity_parent_folders = [ f for f in project_module_settings["entities_root"] @@ -181,8 +187,8 @@ def update_op_assets( # Find root folder docs root_folder_docs = get_assets( project_name, - asset_name=[entity_parent_folders[-1]], - fields=["_id", "data.root_of"] + asset_names=[entity_parent_folders[-1]], + fields=["_id", "data.root_of"], ) # NOTE: Not sure why it's checking for entity type? # OP3 does not support multiple assets with same names so type @@ -219,7 +225,7 @@ def update_op_assets( # Get parent entity parent_entity = parent_doc["data"]["zou"] - parent_zou_id = parent_entity["parent_id"] + parent_zou_id = parent_entity.get("parent_id") # Set root folders parents item_data["parents"] = entity_parent_folders + item_data["parents"] @@ -236,7 +242,7 @@ def update_op_assets( "$set": { "name": item["name"], "data": item_data, - "parent": asset_doc_ids[item["project_id"]]["_id"], + "parent": project_doc["_id"], } }, ) @@ -327,6 +333,10 @@ def sync_all_projects(login: str, password: str): def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): """Update OP project in DB with Zou data. + `root_of` is meant to sort entities by type for a better readability in the data tree. It + puts all shot like (Shot and Episode and Sequence) and asset entities under two different root + folders or hierarchy, defined in settings. + Args: dbcon (AvalonMongoDB): MongoDB connection project (dict): Project dict got using gazu. 
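For context, the Kitsu side of this sync is plain gazu calls. A hedged sketch of the fetching step; the host URL, credentials and project name are placeholders, and only public gazu API functions are used:

    import gazu

    # Placeholder host and credentials - normally taken from the Kitsu
    # module settings and the user's login.
    gazu.client.set_host("https://kitsu.example.com/api")
    gazu.log_in("sync-bot@example.com", "secret")

    project = gazu.project.get_project_by_name("MyProject")
    entities = (
        gazu.asset.all_assets_for_project(project)
        + gazu.asset.all_asset_types_for_project(project)
        + gazu.shot.all_episodes_for_project(project)
        + gazu.shot.all_sequences_for_project(project)
        + gazu.shot.all_shots_for_project(project)
    )
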
@@ -341,12 +351,17 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): # Get all assets from zou all_assets = gazu.asset.all_assets_for_project(project) + all_asset_types = gazu.asset.all_asset_types_for_project(project) all_episodes = gazu.shot.all_episodes_for_project(project) all_seqs = gazu.shot.all_sequences_for_project(project) all_shots = gazu.shot.all_shots_for_project(project) all_entities = [ item - for item in all_assets + all_episodes + all_seqs + all_shots + for item in all_assets + + all_asset_types + + all_episodes + + all_seqs + + all_shots if naming_pattern.match(item["name"]) ] @@ -401,21 +416,20 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): "data": { "root_of": entity_type, "parents": parent_folders[:i], - "visualParent": direct_parent_doc, + "visualParent": direct_parent_doc.inserted_id + if direct_parent_doc + else None, "tasks": {}, }, } ) # Create - to_insert = [] - to_insert.extend( - [ - create_op_asset(item) - for item in all_entities - if item["id"] not in zou_ids_and_asset_docs.keys() - ] - ) + to_insert = [ + create_op_asset(item) + for item in all_entities + if item["id"] not in zou_ids_and_asset_docs.keys() + ] if to_insert: # Insert doc in DB dbcon.insert_many(to_insert) From a7044dadf7c74260f5c8945cccad196d4de89a1e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Jul 2022 17:35:27 +0200 Subject: [PATCH 451/785] fix containers varible usage --- openpype/pipeline/load/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index a9aa240ff6..8b12088d3c 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -732,7 +732,7 @@ def filter_containers(containers, project_name): some missing entity in database. Args: - containers (list[dict]): List of containers referenced into scene. + containers (Iterable[dict]): List of containers referenced into scene. project_name (str): Name of project in which context shoud look for versions. @@ -741,6 +741,9 @@ def filter_containers(containers, project_name): 'invalid' and 'not_found' containers. """ + # Make sure containers is list that won't change + containers = list(containers) + outdated_containers = [] uptodate_containers = [] not_found_containers = [] From 02cc2166c1ee165b48d697c876675302c8cb77db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 20 Jul 2022 17:37:19 +0200 Subject: [PATCH 452/785] docstring linting line length --- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index e0ff87adf7..cabf4e4d18 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -333,9 +333,9 @@ def sync_all_projects(login: str, password: str): def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): """Update OP project in DB with Zou data. - `root_of` is meant to sort entities by type for a better readability in the data tree. It - puts all shot like (Shot and Episode and Sequence) and asset entities under two different root - folders or hierarchy, defined in settings. + `root_of` is meant to sort entities by type for a better readability in + the data tree. It puts all shot like (Shot and Episode and Sequence) and + asset entities under two different root folders or hierarchy, defined in settings. 
Args: dbcon (AvalonMongoDB): MongoDB connection From a3144c9d75e3372e1d7a3008f54e2695c81a51d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 20 Jul 2022 17:40:16 +0200 Subject: [PATCH 453/785] docstring linting line length --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index cabf4e4d18..46e0fa38f3 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -333,7 +333,7 @@ def sync_all_projects(login: str, password: str): def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): """Update OP project in DB with Zou data. - `root_of` is meant to sort entities by type for a better readability in + `root_of` is meant to sort entities by type for a better readability in the data tree. It puts all shot like (Shot and Episode and Sequence) and asset entities under two different root folders or hierarchy, defined in settings. From af45aff844ab2ff28ccc76ec99d3e5a9803c92e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 20 Jul 2022 17:56:37 +0200 Subject: [PATCH 454/785] docstring linting line length --- openpype/modules/kitsu/utils/update_op_with_zou.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 46e0fa38f3..7262d2ee1a 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -335,7 +335,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): `root_of` is meant to sort entities by type for a better readability in the data tree. It puts all shot like (Shot and Episode and Sequence) and - asset entities under two different root folders or hierarchy, defined in settings. + asset entities under two different root folders or hierarchy, defined in + settings. Args: dbcon (AvalonMongoDB): MongoDB connection From 480d1968124ccf4fe2bc70b109145b016292fe25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 20 Jul 2022 17:56:55 +0200 Subject: [PATCH 455/785] docstring linting line length --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 7262d2ee1a..7bfbd42f6a 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -335,7 +335,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): `root_of` is meant to sort entities by type for a better readability in the data tree. It puts all shot like (Shot and Episode and Sequence) and - asset entities under two different root folders or hierarchy, defined in + asset entities under two different root folders or hierarchy, defined in settings. 
Args: From b9ffa7720bba5b7bd05a00926a76e7debb9dfc34 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 08:22:49 +0200 Subject: [PATCH 456/785] general: making exctract trim video audio compatible with traypublisher --- .../publish/extract_trim_video_audio.py | 34 ++++++++++++------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/openpype/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py index b0c30283d9..8136ff1a6a 100644 --- a/openpype/plugins/publish/extract_trim_video_audio.py +++ b/openpype/plugins/publish/extract_trim_video_audio.py @@ -40,6 +40,20 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): fps = instance.data["fps"] video_file_path = instance.data["editorialSourcePath"] extensions = instance.data.get("extensions", ["mov"]) + output_file_type = instance.data.get("outputFileType") + + frame_start = int(instance.data["frameStart"]) + frame_end = int(instance.data["frameEnd"]) + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + + clip_start_h = float(instance.data["clipInH"]) + _dur = instance.data["clipDuration"] + handle_dur = (handle_start + handle_end) + clip_dur_h = float(_dur + handle_dur) + + if output_file_type: + extensions = [output_file_type] for ext in extensions: self.log.info("Processing ext: `{}`".format(ext)) @@ -49,16 +63,10 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): clip_trimed_path = os.path.join( staging_dir, instance.data["name"] + ext) - # # check video file metadata - # input_data = plib.get_ffprobe_streams(video_file_path)[0] - # self.log.debug(f"__ input_data: `{input_data}`") - - start = float(instance.data["clipInH"]) - dur = float(instance.data["clipDurationH"]) if ext == ".wav": # offset time as ffmpeg is having bug - start += 0.5 + clip_start_h += 0.5 # remove "review" from families instance.data["families"] = [ fml for fml in instance.data["families"] @@ -67,9 +75,9 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): ffmpeg_args = [ ffmpeg_path, - "-ss", str(start / fps), + "-ss", str(clip_start_h / fps), "-i", video_file_path, - "-t", str(dur / fps) + "-t", str(clip_dur_h / fps) ] if ext in [".mov", ".mp4"]: ffmpeg_args.extend([ @@ -98,10 +106,10 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): "ext": ext[1:], "files": os.path.basename(clip_trimed_path), "stagingDir": staging_dir, - "frameStart": int(instance.data["frameStart"]), - "frameEnd": int(instance.data["frameEnd"]), - "frameStartFtrack": int(instance.data["frameStartH"]), - "frameEndFtrack": int(instance.data["frameEndH"]), + "frameStart": frame_start, + "frameEnd": frame_end, + "frameStartFtrack": frame_start - handle_start, + "frameEndFtrack": frame_end + handle_end, "fps": fps, } From 34f43fe86a664877e7695a09f9e3a29388db0ca1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 08:24:01 +0200 Subject: [PATCH 457/785] trayp: passing clipDuration attribute --- .../hosts/traypublisher/plugins/create/create_editorial.py | 2 +- .../traypublisher/plugins/publish/collect_clip_instances.py | 4 ++-- .../traypublisher/plugins/publish/collect_shot_instances.py | 2 ++ 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 55c4ca76b7..899a45e269 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -455,7 
+455,6 @@ or updating already created. Publishing will create OTIO file. "instance_label": label, "instance_id": c_instance.data["instance_id"] }) - else: # add review family if defined future_instance_data.update({ @@ -623,6 +622,7 @@ or updating already created. Publishing will create OTIO file. "frameEnd": int(frame_end), "clipIn": int(clip_in), "clipOut": int(clip_out), + "clipDuration": int(clip.duration().value), "sourceIn": int(source_in), "sourceOut": int(source_out) } diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py index e3dfb1512a..bc86cb8ef3 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py @@ -6,7 +6,7 @@ class CollectClipInstance(pyblish.api.InstancePlugin): """Collect clip instances and resolve its parent""" label = "Collect Clip Instances" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.081 hosts = ["traypublisher"] families = ["plate", "review", "audio"] @@ -29,4 +29,4 @@ class CollectClipInstance(pyblish.api.InstancePlugin): instance.context.data["editorialSourcePath"]) instance.data["families"].append("trimming") - self.log.debug(pformat(instance.data)) \ No newline at end of file + self.log.debug(pformat(instance.data)) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index 86505f76c5..9d8ed8ed72 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -21,6 +21,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin): "frameEnd", "clipIn", "clipOut", + "clipDuration", "sourceIn", "sourceOut", "otioClip", @@ -99,6 +100,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin): "frameEnd": workfile_start_frame + frame_dur, "clipIn": _cr_attrs["clipIn"], "clipOut": _cr_attrs["clipOut"], + "clipDuration": _cr_attrs["clipDuration"], "sourceIn": _cr_attrs["sourceIn"], "sourceOut": _cr_attrs["sourceOut"], "workfileFrameStart": workfile_start_frame From 449fabf449fcc47e807807dfc8fcbfc9b11a4bc2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 08:24:29 +0200 Subject: [PATCH 458/785] global: removing trayp host from plugins --- .../publish/collect_otio_subset_resources.py | 20 +++++++++---------- .../publish/extract_otio_trimming_video.py | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index ca29b82f4e..9c19f8a78e 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -23,7 +23,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): label = "Collect OTIO Subset Resources" order = pyblish.api.CollectorOrder - 0.077 families = ["clip"] - hosts = ["resolve", "hiero", "flame", "traypublisher"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): @@ -116,8 +116,10 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): # check in two way if it is sequence if hasattr(otio.schema, "ImageSequenceReference"): # for OpenTimelineIO 0.13 and newer - if isinstance(media_ref, - otio.schema.ImageSequenceReference): + if isinstance( + media_ref, + 
otio.schema.ImageSequenceReference + ): is_sequence = True else: # for OpenTimelineIO 0.12 and older @@ -139,11 +141,9 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): padding=media_ref.frame_zero_padding ) collection.indexes.update( - [i for i in range(a_frame_start_h, (a_frame_end_h + 1))]) + list(range(a_frame_start_h, (a_frame_end_h + 1))) + ) - self.log.debug(collection) - repre = self._create_representation( - frame_start, frame_end, collection=collection) else: # in case it is file sequence but not new OTIO schema # `ImageSequenceReference` @@ -152,9 +152,9 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): path, trimmed_media_range_h, metadata) self.staging_dir, collection = collection_data - self.log.debug(collection) - repre = self._create_representation( - frame_start, frame_end, collection=collection) + self.log.debug(collection) + repre = self._create_representation( + frame_start, frame_end, collection=collection) else: _trim = False dirname, filename = os.path.split(media_ref.target_url) diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py index 46a4056a9d..19625fa568 100644 --- a/openpype/plugins/publish/extract_otio_trimming_video.py +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -20,7 +20,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): order = api.ExtractorOrder label = "Extract OTIO trim longer video" families = ["trim"] - hosts = ["resolve", "hiero", "flame", "traypublisher"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): self.staging_dir = self.staging_dir(instance) From 09182b312eca0ec853e2a2536b2426a0d5218e6e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 08:31:34 +0200 Subject: [PATCH 459/785] ftrack: adding options for plugin to settings --- openpype/settings/defaults/project_settings/ftrack.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 70cda68cb4..f6074d5464 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -447,6 +447,9 @@ "enabled": false, "ftrack_custom_attributes": {} }, + "IntegrateFtrackComponentOverwrite": { + "enabled": true + }, "IntegrateFtrackInstance": { "family_mapping": { "camera": "cam", From b60384f534c8df83738ca35985c74ce1e83b7c03 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:12:02 +0200 Subject: [PATCH 460/785] ftrack: optional plugin with optional attributes --- .../integrate_ftrack_component_overwrite.py | 5 ++++- .../projects_schema/schema_project_ftrack.json | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py index 047fd8462c..8cb2336391 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py @@ -13,7 +13,10 @@ class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin): active = False def process(self, instance): - component_list = instance.data['ftrackComponentsList'] + component_list = instance.data.get('ftrackComponentsList') + if not component_list: + self.log.info("No component to overwrite...") + return for cl 
in component_list: cl['component_overwrite'] = True diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index e008fd85ee..c06bec0f58 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -930,6 +930,21 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "IntegrateFtrackComponentOverwrite", + "label": "IntegrateFtrackComponentOverwrite", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "dict", "key": "IntegrateFtrackInstance", From bb9c03a94f1f0060424aa52973e3f14746cd475b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:12:22 +0200 Subject: [PATCH 461/785] ftrack: adding additional families to settings --- openpype/settings/defaults/project_settings/ftrack.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index f6074d5464..3e86581a03 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -301,7 +301,9 @@ "traypublisher" ], "families": [ - "plate" + "plate", + "review", + "audio" ], "task_types": [], "tasks": [], From cc47c30d5a45cad1805b2c796f5fae2b214d18ee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:12:52 +0200 Subject: [PATCH 462/785] global: adding trayp families to plugins --- openpype/plugins/publish/extract_otio_file.py | 2 +- openpype/plugins/publish/validate_editorial_asset_name.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index 3bd217d5d4..4d310ce109 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -12,7 +12,7 @@ class ExtractOTIOFile(openpype.api.Extractor): label = "Extract OTIO file" order = pyblish.api.ExtractorOrder - 0.45 families = ["workfile"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "traypublisher"] def process(self, instance): # create representation data diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py index 702e87b58d..694788c414 100644 --- a/openpype/plugins/publish/validate_editorial_asset_name.py +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -19,7 +19,8 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): "hiero", "standalonepublisher", "resolve", - "flame" + "flame", + "traypublisher" ] def process(self, context): From f7a6a606f53ae3f3ea376dd546f8f7958953c17b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:13:13 +0200 Subject: [PATCH 463/785] global: dealing with reviewable in trim audio/video plugin --- openpype/plugins/publish/extract_trim_video_audio.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py index 8136ff1a6a..06817c4b5a 100644 --- a/openpype/plugins/publish/extract_trim_video_audio.py +++ b/openpype/plugins/publish/extract_trim_video_audio.py @@ -41,6 +41,7 @@ class 
ExtractTrimVideoAudio(openpype.api.Extractor): video_file_path = instance.data["editorialSourcePath"] extensions = instance.data.get("extensions", ["mov"]) output_file_type = instance.data.get("outputFileType") + reviewable = "review" in instance.data["families"] frame_start = int(instance.data["frameStart"]) frame_end = int(instance.data["frameEnd"]) @@ -111,9 +112,10 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): "frameStartFtrack": frame_start - handle_start, "frameEndFtrack": frame_end + handle_end, "fps": fps, + "tags": [] } - if ext in [".mov", ".mp4"]: + if ext in [".mov", ".mp4"] and reviewable: repre.update({ "thumbnail": True, "tags": ["review", "ftrackreview", "delete"]}) From 7e6569fdd261481fde442c85452219441ceb629d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:13:36 +0200 Subject: [PATCH 464/785] global: adding trayp family --- .../ftrack/plugins/publish/integrate_hierarchy_ftrack.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 1a5d74bf26..b8855ee2bd 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -65,7 +65,13 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder - 0.04 label = 'Integrate Hierarchy To Ftrack' families = ["shot"] - hosts = ["hiero", "resolve", "standalonepublisher", "flame"] + hosts = [ + "hiero", + "resolve", + "standalonepublisher", + "flame", + "traypublisher" + ] optional = False def process(self, context): From 97879475732a5edef30d1ff625b6e2b01a0dd81a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:14:13 +0200 Subject: [PATCH 465/785] trayp: collect review input to instance data --- .../plugins/publish/collect_editorial_reviewable.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py index 6cd8c42546..2e4ad9e181 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py @@ -19,12 +19,8 @@ class CollectEditorialReviewable(pyblish.api.InstancePlugin): return creator_attributes = instance.data["creator_attributes"] - repre = instance.data["representations"][0] if creator_attributes["add_review_family"]: - repre["tags"].append("review") instance.data["families"].append("review") - instance.data["representations"] = [repre] - self.log.debug("instance.data {}".format(instance.data)) From 0ea71b05fb0e38c925a771dd551088344ce2479e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 09:24:28 +0200 Subject: [PATCH 466/785] global: adding review family to filters with non trayp exception --- openpype/plugins/publish/extract_thumbnail.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 7933595b89..b4c4bb2036 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -20,7 +20,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder families = [ "imagesequence", "render", "render2d", 
"prerender", - "source", "plate", "take" + "source", "plate", "take", "review" ] hosts = ["shell", "fusion", "resolve", "traypublisher"] enabled = False @@ -29,6 +29,14 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): ffmpeg_args = None def process(self, instance): + # make sure this apply only to reveiw in both family keys + # HACK: only traypublisher review family is allowed + if ( + instance.data["family"] != "review" + and "review" in instance.data["families"] + ): + return + self.log.info("subset {}".format(instance.data['subset'])) # skip crypto passes. From 92b611d76aabb35fefb3f34430a9679b5c9b28da Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Jul 2022 09:58:17 +0200 Subject: [PATCH 467/785] Raise AttributeError instead of ImportError on missing attribute in 'openpype_interfaces' --- openpype/modules/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index b9ccec13cc..1bd343fd07 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -49,6 +49,7 @@ class _ModuleClass(object): Object of this class can be stored to `sys.modules` and used for storing dynamically imported modules. """ + def __init__(self, name): # Call setattr on super class super(_ModuleClass, self).__setattr__("name", name) @@ -116,12 +117,13 @@ class _InterfacesClass(_ModuleClass): - this is because interfaces must be available even if are missing implementation """ + def __getattr__(self, attr_name): if attr_name not in self.__attributes__: if attr_name in ("__path__", "__file__"): return None - raise ImportError(( + raise AttributeError(( "cannot import name '{}' from 'openpype_interfaces'" ).format(attr_name)) From 635164b00c6573396096c72268e71a4a062688ff Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Jul 2022 10:40:39 +0200 Subject: [PATCH 468/785] added HiddenCreator description --- website/docs/dev_publishing.md | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index 8ee3b7e85f..c949fa8570 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -66,7 +66,7 @@ Another optional function is **get_current_context**. This function is handy in Main responsibility of create plugin is to create, update, collect and remove instance metadata and propagate changes to create context. Has access to **CreateContext** (`self.create_context`) that discovered the plugin so has also access to other creators and instances. Create plugins have a lot of responsibility so it is recommended to implement common code per host. #### *BaseCreator* -Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **AutoCreator** and **Creator** variants. +Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **HiddenCreator**, **AutoCreator** and **Creator** variants. **Abstractions** - **`family`** (class attr) - Tells what kind of instance will be created. @@ -92,7 +92,7 @@ def collect_instances(self): self._add_instance_to_context(instance) ``` -- **`create`** (method) - Create a new object of **CreatedInstance** store its metadata to the workfile and add the instance into the created context. Failed Creating should raise **CreatorError** if an error happens that artists can fix or give them some useful information. 
Triggers and implementation differs for **Creator**, **HiddenCreator** and **AutoCreator**.
 
 - **`update_instances`** (method) - Update data of instances. Receives tuple with **instance** and **changes**.
 ```python
@@ -199,6 +199,20 @@ class RenderLayerCreator(Creator):
 
 - **`get_dynamic_data`** (method) - Can be used to extend data for subset templates which may be required in some cases.
 
+#### *HiddenCreator*
+Creator which is not shown in the UI, so artists can't trigger it directly, but it is available for other creators. This creator is primarily meant for cases when creation should create different types of instances. For example during editorial publishing, where the input is a single edl file that should create 2 or more kinds of instances, each with a different family, attributes and abilities. Arguments for creation are limited to `instance_data` and `source_data`. Data of `instance_data` should follow what is sent to other creators and `source_data` can be used to send custom data defined by the main creator. It is expected that `HiddenCreator` has a specific main or "parent" creator.
+
+```python
+def create(self, instance_data, source_data):
+    variant = instance_data["variant"]
+    task_name = instance_data["task"]
+    asset_name = instance_data["asset"]
+    asset_doc = get_asset_by_name(self.project_name, asset_name)
+    self.get_subset_name(
+        variant, task_name, asset_doc, self.project_name, self.host_name)
+```
+
+
 #### *AutoCreator*
 Creator that is triggered on reset of create context. Can be used for families that are expected to be created automatically without artist interaction (e.g. **workfile**). Method `create` is triggered after collecting all creators.
 
From a8d99a6f91d8fc44bda465a6a5f3f5425931eb75 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 21 Jul 2022 10:41:56 +0200
Subject: [PATCH 469/785] changed imports of 'attribute_definitions'

---
 website/docs/dev_publishing.md | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md
index c949fa8570..5266ece72c 100644
--- a/website/docs/dev_publishing.md
+++ b/website/docs/dev_publishing.md
@@ -172,11 +172,11 @@ class RenderLayerCreator(Creator):
     icon = "fa5.building"
 ```
 
-- **`get_instance_attr_defs`** (method) - Attribute definitions of instance. Creator can define attribute values with default values for each instance. These attributes may affect how instances will be instance processed during publishing. Attribute defiitions can be used from `openpype.pipeline.lib.attribute_definitions` (NOTE: Will be moved to `openpype.lib.attribute_definitions` soon). Attribute definitions define basic types of values for different cases e.g. boolean, number, string, enumerator, etc. Default implementation returns **instance_attr_defs**.
+- **`get_instance_attr_defs`** (method) - Attribute definitions of instance. Creator can define attribute values with default values for each instance. These attributes may affect how instances will be processed during publishing. Attribute definitions can be used from `openpype.lib.attribute_definitions`. Attribute definitions define basic types of values for different cases e.g.
boolean, number, string, enumerator, etc. Default implementation returns **instance_attr_defs**. - **`instance_attr_defs`** (attr) - Attribute for default implementation of **get_instance_attr_defs**. ```python -from openpype.pipeline import attribute_definitions +from openpype.lib import attribute_definitions class RenderLayerCreator(Creator): @@ -311,7 +311,8 @@ class BulkRenderCreator(Creator): - **`pre_create_attr_defs`** (attr) - Attribute for default implementation of **get_pre_create_attr_defs**. ```python -from openpype.pipeline import Creator, attribute_definitions +from openpype.lib import attribute_definitions +from openpype.pipeline.create import Creator class CreateRender(Creator): @@ -484,10 +485,8 @@ Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_ ```python import pyblish.api -from openpype.pipeline import ( - OpenPypePyblishPluginMixin, - attribute_definitions, -) +from openpype.lib import attribute_definitions +from openpype.pipeline import OpenPypePyblishPluginMixin # Example context plugin From 6d093b92d9db498ab40dbc2b5bdc7a93f7581ebb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Jul 2022 10:42:12 +0200 Subject: [PATCH 470/785] changed queries and access to current session --- website/docs/dev_publishing.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index 5266ece72c..f11a2c3047 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -248,14 +248,14 @@ def create(self): # - variant can be filled from settings variant = self._variant_name # Only place where we can look for current context - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = self.project_name + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] # Create new instance if does not exist yet if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -278,7 +278,7 @@ def create(self): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) From 2209bcf6b10f4e65a25e58b34373178aa8b92648 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Jul 2022 11:31:44 +0200 Subject: [PATCH 471/785] check for 'ILoadHost' to call different method on host --- openpype/pipeline/load/utils.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 8b12088d3c..fe5102353d 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -7,6 +7,7 @@ import inspect import collections import numbers +from openpype.host import ILoadHost from openpype.client import ( get_project, get_assets, @@ -719,7 +720,11 @@ def get_outdated_containers(host=None, project_name=None): if project_name is None: project_name = legacy_io.active_project() - containers = host.ls() + + if isinstance(host, ILoadHost): + containers = 
host.get_containers() + else: + containers = host.ls() return filter_containers(containers, project_name).outdated From dc7856e919d3b7536c1bd5643d1b0e7ccbc8d059 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 11:56:20 +0200 Subject: [PATCH 472/785] trayp: processing PR comments --- openpype/hosts/traypublisher/api/editorial.py | 20 ++++--- .../plugins/create/create_editorial.py | 55 ++++++------------- .../plugins/create/create_from_settings.py | 3 - .../plugins/publish/collect_clip_instances.py | 8 ++- .../publish/collect_editorial_reviewable.py | 2 - .../plugins/publish/collect_shot_instances.py | 2 +- 6 files changed, 37 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index 948e05ec61..d6f876ab76 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -5,7 +5,7 @@ from openpype.client import get_asset_by_id from openpype.pipeline.create import CreatorError -class ShotMetadataSover: +class ShotMetadataSolver: """Collecting hierarchy context from `parents` and `hierarchy` data present in `clip` family instances coming from the request json data file @@ -22,12 +22,18 @@ class ShotMetadataSover: shot_hierarchy = None shot_add_tasks = None - def __init__(self, creator_settings, logger): - self.clip_name_tokenizer = creator_settings["clip_name_tokenizer"] - self.shot_rename = creator_settings["shot_rename"] - self.shot_hierarchy = creator_settings["shot_hierarchy"] - self.shot_add_tasks = creator_settings["shot_add_tasks"] - + def __init__( + self, + clip_name_tokenizer, + shot_rename, + shot_hierarchy, + shot_add_tasks, + logger + ): + self.clip_name_tokenizer = clip_name_tokenizer + self.shot_rename = shot_rename + self.shot_hierarchy = shot_hierarchy + self.shot_add_tasks = shot_add_tasks self.log = logger def _rename_template(self, data): diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 899a45e269..7b2585d630 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -11,7 +11,7 @@ from openpype.hosts.traypublisher.api.plugin import ( HiddenTrayPublishCreator ) from openpype.hosts.traypublisher.api.editorial import ( - ShotMetadataSover + ShotMetadataSolver ) from openpype.pipeline import CreatedInstance @@ -65,13 +65,6 @@ CLIP_ATTR_DEFS = [ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): host_name = "traypublisher" - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialClipInstanceCreatorBase, self).__init__( - project_settings, *args, **kwargs - ) - def create(self, instance_data, source_data=None): self.log.info(f"instance_data: {instance_data}") subset_name = instance_data["subset"] @@ -106,13 +99,6 @@ class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): family = "shot" label = "Editorial Shot" - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialShotInstanceCreator, self).__init__( - project_settings, *args, **kwargs - ) - def get_instance_attr_defs(self): attr_defs = [ TextDef( @@ -123,44 +109,24 @@ class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): attr_defs.extend(CLIP_ATTR_DEFS) return attr_defs + class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): identifier = "editorial_plate" family = "plate" label = 
"Editorial Plate" - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialPlateInstanceCreator, self).__init__( - project_settings, *args, **kwargs - ) - class EditorialAudioInstanceCreator(EditorialClipInstanceCreatorBase): identifier = "editorial_audio" family = "audio" label = "Editorial Audio" - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialAudioInstanceCreator, self).__init__( - project_settings, *args, **kwargs - ) - class EditorialReviewInstanceCreator(EditorialClipInstanceCreatorBase): identifier = "editorial_review" family = "review" label = "Editorial Review" - def __init__( - self, project_settings, *args, **kwargs - ): - super(EditorialReviewInstanceCreator, self).__init__( - project_settings, *args, **kwargs - ) - class EditorialSimpleCreator(TrayPublishCreator): @@ -188,8 +154,19 @@ or updating already created. Publishing will create OTIO file. ) # get this creator settings by identifier self._creator_settings = editorial_creators.get(self.identifier) - self._shot_metadata_solver = ShotMetadataSover( - self._creator_settings, self.log) + + clip_name_tokenizer = self._creator_settings["clip_name_tokenizer"] + shot_rename = self._creator_settings["shot_rename"] + shot_hierarchy = self._creator_settings["shot_hierarchy"] + shot_add_tasks = self._creator_settings["shot_add_tasks"] + + self._shot_metadata_solver = ShotMetadataSolver( + clip_name_tokenizer, + shot_rename, + shot_hierarchy, + shot_add_tasks, + self.log + ) # try to set main attributes from settings if self._creator_settings.get("default_variants"): @@ -717,4 +694,4 @@ or updating already created. Publishing will create OTIO file. attr_defs.append(UISeparatorDef()) attr_defs.extend(CLIP_ATTR_DEFS) - return attr_defs \ No newline at end of file + return attr_defs diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index 1271e03fdb..41c1c29bb0 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,5 +1,4 @@ import os -from pprint import pformat from openpype.api import get_project_settings, Logger log = Logger.get_logger(__name__) @@ -16,8 +15,6 @@ def initialize(): global_variables = globals() for item in simple_creators: - log.debug(pformat(item)) - dynamic_plugin = SettingsCreator.from_settings(item) global_variables[dynamic_plugin.__name__] = dynamic_plugin diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py index bc86cb8ef3..ca269a9c27 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py @@ -13,7 +13,13 @@ class CollectClipInstance(pyblish.api.InstancePlugin): def process(self, instance): creator_identifier = instance.data["creator_identifier"] - if "editorial" not in creator_identifier: + if ( + creator_identifier not in [ + "editorial_plate", + "editorial_audio", + "editorial_review" + ] + ): return instance.data["families"].append("clip") diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py index 2e4ad9e181..34f7a9ead8 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py +++ 
b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py @@ -1,5 +1,3 @@ -import os - import pyblish.api diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index 9d8ed8ed72..e6f1173bc4 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -166,4 +166,4 @@ class CollectShotInstance(pyblish.api.InstancePlugin): else: new_dict[key] = ex_dict[key] - return new_dict \ No newline at end of file + return new_dict From dfa6328d74fbbd806769d3689ccc1b2f85dc757e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Jul 2022 11:58:42 +0200 Subject: [PATCH 473/785] remove metadata from default environment values --- openpype/settings/defaults/system_settings/general.json | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index a06947ba77..909ffc1ee4 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -2,11 +2,7 @@ "studio_name": "Studio name", "studio_code": "stu", "admin_password": "", - "environment": { - "__environment_keys__": { - "global": [] - } - }, + "environment": {}, "log_to_server": true, "disk_mapping": { "windows": [], From 5d49d9c3d2876bddc4a1856b50e6ef94bf0c90d2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 12:29:38 +0200 Subject: [PATCH 474/785] trayp: adding universal attribute for new asset creation --- openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 ++ openpype/plugins/publish/validate_asset_docs.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 7b2585d630..fcaaeb1e75 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -550,6 +550,8 @@ or updating already created. Publishing will create OTIO file. 
"asset": parent_asset_name, "task": "", + "new_asset_publishing": True, + # parent time properties "trackStartFrame": track_start_frame, "timelineOffset": timeline_offset, diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index daeb442f28..9f997d4817 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,7 +24,7 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") - elif "editorial" in instance.data.get("creator_identifier", ""): + elif instance.context.data.get("new_asset_publishing"): # skip if it is editorial self.log.info("Editorial instance is no need to check...") From 7c30798bec528b8410fda39dd409022696afbf95 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 12:30:11 +0200 Subject: [PATCH 475/785] global: removing redundant check --- openpype/plugins/publish/extract_thumbnail.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index b4c4bb2036..89738a8063 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -20,7 +20,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder families = [ "imagesequence", "render", "render2d", "prerender", - "source", "plate", "take", "review" + "source", "clip", "take" ] hosts = ["shell", "fusion", "resolve", "traypublisher"] enabled = False @@ -29,13 +29,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): ffmpeg_args = None def process(self, instance): - # make sure this apply only to reveiw in both family keys - # HACK: only traypublisher review family is allowed - if ( - instance.data["family"] != "review" - and "review" in instance.data["families"] - ): - return self.log.info("subset {}".format(instance.data['subset'])) From 60adefa5ccf4cf737c8f78338e8e8a5173045726 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 14:54:21 +0200 Subject: [PATCH 476/785] global: renaming `newAssetPublishing` --- openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 +- openpype/plugins/publish/validate_asset_docs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index fcaaeb1e75..db0287129a 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -550,7 +550,7 @@ or updating already created. Publishing will create OTIO file. 
"asset": parent_asset_name, "task": "", - "new_asset_publishing": True, + "newAssetPublishing": True, # parent time properties "trackStartFrame": track_start_frame, diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index 9f997d4817..dbec9edd7b 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,7 +24,7 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") - elif instance.context.data.get("new_asset_publishing"): + elif instance.context.data.get("newAssetPublishing"): # skip if it is editorial self.log.info("Editorial instance is no need to check...") From cf2e5177dd6b7635cdcf0b53720375abf67dd2c2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 21 Jul 2022 15:32:25 +0200 Subject: [PATCH 477/785] trayp: adding docstrings --- openpype/hosts/traypublisher/api/editorial.py | 89 +++++++++++++++++-- 1 file changed, 84 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index d6f876ab76..92ad65a851 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -6,12 +6,12 @@ from openpype.pipeline.create import CreatorError class ShotMetadataSolver: - """Collecting hierarchy context from `parents` and `hierarchy` data - present in `clip` family instances coming from the request json data file + """ Solving hierarchical metadata - It will add `hierarchical_context` into each instance for integrate - plugins to be able to create needed parents for the context if they - don't exist yet + Used during editorial publishing. Works with imput + clip name and settings defining python formatable + template. Settings also define searching patterns + and its token keys used for formating in templates. """ NO_DECOR_PATERN = re.compile(r"\{([a-z]*?)\}") @@ -37,6 +37,17 @@ class ShotMetadataSolver: self.log = logger def _rename_template(self, data): + """Shot renaming function + + Args: + data (dict): formating data + + Raises: + CreatorError: If missing keys + + Returns: + str: formated new name + """ shot_rename_template = self.shot_rename[ "shot_rename_template"] try: @@ -51,6 +62,20 @@ class ShotMetadataSolver: )) def _generate_tokens(self, clip_name, source_data): + """Token generator + + Settings defines token pairs key and regex expression. + + Args: + clip_name (str): name of clip in editorial + source_data (dict): data for formating + + Raises: + CreatorError: if missing key + + Returns: + dict: updated source_data + """ output_data = deepcopy(source_data["anatomy_data"]) output_data["clip_name"] = clip_name @@ -78,7 +103,20 @@ class ShotMetadataSolver: return output_data def _create_parents_from_settings(self, parents, data): + """Formating parent components. 
+ Args: + parents (list): list of dict parent components + data (dict): formating data + + Raises: + CreatorError: missing formating key + CreatorError: missing token key + KeyError: missing parent token + + Returns: + list: list of dict of parent components + """ # fill the parents parts from presets shot_hierarchy = deepcopy(self.shot_hierarchy) hierarchy_parents = shot_hierarchy["parents"] @@ -152,6 +190,14 @@ class ShotMetadataSolver: return parents def _create_hierarchy_path(self, parents): + """Converting hierarchy path from parents + + Args: + parents (list): list of dict parent components + + Returns: + str: hierarchy path + """ return "/".join( [ p["entity_name"] for p in parents @@ -164,6 +210,17 @@ class ShotMetadataSolver: asset_doc, project_doc ): + """Returning parents from context on selected asset. + + Context defined in Traypublisher project tree. + + Args: + asset_doc (db obj): selected asset doc + project_doc (db obj): actual project doc + + Returns: + list: list of dict parent components + """ project_name = project_doc["name"] visual_hierarchy = [asset_doc] current_doc = asset_doc @@ -192,6 +249,17 @@ class ShotMetadataSolver: ] def _generate_tasks_from_settings(self, project_doc): + """Convert settings inputs to task data. + + Args: + project_doc (db obj): actual project doc + + Raises: + KeyError: Missing task type in project doc + + Returns: + dict: tasks data + """ tasks_to_add = {} project_tasks = project_doc["config"]["tasks"] @@ -214,6 +282,17 @@ class ShotMetadataSolver: return tasks_to_add def generate_data(self, clip_name, source_data): + """Metadata generator. + + Converts input data to hierarchy mentadata. + + Args: + clip_name (str): clip name + source_data (dict): formating data + + Returns: + (str, dict): shot name and hierarchy data + """ self.log.info(f"_ source_data: {source_data}") tasks = {} From 976411521bf4e7f2db521813b9622e16dd62e800 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 11:10:41 +0200 Subject: [PATCH 478/785] trayp: addresing issue from PR - different edl test https://github.com/pypeclub/OpenPype/pull/3492#pullrequestreview-1047573472 --- .../traypublisher/plugins/create/create_editorial.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index db0287129a..d6d669a56c 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -18,6 +18,7 @@ from openpype.pipeline import CreatedInstance from openpype.lib import ( get_ffprobe_data, + convert_ffprobe_fps_value, FileDef, TextDef, @@ -259,6 +260,7 @@ or updating already created. Publishing will create OTIO file. # EDL has no frame rate embedded so needs explicit # frame rate else 24 is asssumed. kwargs["rate"] = fps + kwargs["ignore_timecode_mismatch"] = True self.log.info(f"kwargs: {kwargs}") return otio.adapters.read_from_file(sequence_path, **kwargs) @@ -387,7 +389,11 @@ or updating already created. Publishing will create OTIO file. 
"video": True, "start_frame": 0, "duration": int(video_stream["nb_frames"]), - "fps": float(video_stream["r_frame_rate"][:-2]) + "fps": float( + convert_ffprobe_fps_value( + video_stream["r_frame_rate"] + ) + ) } # get audio streams data From 78b4bbadc92ec29167af3487e1b597e07a40f35e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Jul 2022 11:39:22 +0200 Subject: [PATCH 479/785] add continuos arguments next to each other --- openpype/plugins/publish/extract_review_slate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 2edaf10e6b..28deb360be 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -295,12 +295,14 @@ class ExtractReviewSlate(openpype.api.Extractor): # this will reencode the output if input_audio: fmap = [ + "-filter_complex", "[0:v] [0:a] [1:v] [1:a] concat=n=2:v=1:a=1 [v] [a]", "-map", '[v]', "-map", '[a]' ] else: fmap = [ + "-filter_complex", "[0:v] [1:v] concat=n=2:v=1:a=0 [v]", "-map", '[v]' ] @@ -308,7 +310,6 @@ class ExtractReviewSlate(openpype.api.Extractor): ffmpeg_path, "-i", slate_v_path, "-i", input_path, - "-filter_complex", ] concat_args.extend(fmap) if offset_timecode: From f99f811ddd4194caadd27a481aef766eae2e5727 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Jul 2022 11:39:37 +0200 Subject: [PATCH 480/785] add `-y` into base of ffmpeg arguments --- openpype/plugins/publish/extract_review_slate.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 28deb360be..90dad00b97 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -308,6 +308,7 @@ class ExtractReviewSlate(openpype.api.Extractor): ] concat_args = [ ffmpeg_path, + "-y", "-i", slate_v_path, "-i", input_path, ] @@ -319,6 +320,7 @@ class ExtractReviewSlate(openpype.api.Extractor): # - keep format of output if format_args: concat_args.extend(format_args) + # Use arguments from ffmpeg preset source_ffmpeg_cmd = repre.get("ffmpeg_cmd") if source_ffmpeg_cmd: @@ -334,7 +336,7 @@ class ExtractReviewSlate(openpype.api.Extractor): concat_args.append(arg) # assumes arg has one parameter concat_args.append(args[indx + 1]) - concat_args.append("-y") + # add final output path concat_args.append(output_path) From c34a1270a29c6d660b1c7f40dcca259171b1a553 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 13:05:58 +0200 Subject: [PATCH 481/785] trayp: adding docstrings --- .../plugins/create/create_editorial.py | 290 ++++++++++++++---- 1 file changed, 238 insertions(+), 52 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index d6d669a56c..3bc8f89556 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -64,6 +64,11 @@ CLIP_ATTR_DEFS = [ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): + """ Wrapper class for clip family creators + + Args: + HiddenTrayPublishCreator (BaseCreator): hidden supporting class + """ host_name = "traypublisher" def create(self, instance_data, source_data=None): @@ -96,6 +101,13 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): class 
EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): + """ Shot family class + + The shot metadata instance carrier. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ identifier = "editorial_shot" family = "shot" label = "Editorial Shot" @@ -112,24 +124,54 @@ class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): + """ Plate family class + + Plate representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ identifier = "editorial_plate" family = "plate" label = "Editorial Plate" class EditorialAudioInstanceCreator(EditorialClipInstanceCreatorBase): + """ Audio family class + + Audio representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ identifier = "editorial_audio" family = "audio" label = "Editorial Audio" class EditorialReviewInstanceCreator(EditorialClipInstanceCreatorBase): + """ Review family class + + Review representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ identifier = "editorial_review" family = "review" label = "Editorial Review" class EditorialSimpleCreator(TrayPublishCreator): + """ Editorial creator class + + Simple workflow creator. This creator only disecting input + video file into clip chunks and then converts each to + defined format defined Settings for each subset preset. + + Args: + TrayPublishCreator (Creator): Tray publisher plugin class + """ label = "Editorial Simple" family = "editorial" @@ -242,6 +284,15 @@ or updating already created. Publishing will create OTIO file. media_path, otio_timeline ): + """Otio instance creating function + + Args: + subset_name (str): name of subset + data (dict): instnance data + sequence_path (str): path to sequence file + media_path (str): path to media file + otio_timeline (otio.Timeline): otio timeline object + """ # Pass precreate data to creator attributes data.update({ "sequenceFilePath": sequence_path, @@ -252,6 +303,15 @@ or updating already created. Publishing will create OTIO file. self._create_instance(self.family, subset_name, data) def _create_otio_timeline(self, sequence_path, fps): + """Creating otio timeline from sequence path + + Args: + sequence_path (str): path to sequence file + fps (float): frame per second + + Returns: + otio.Timeline: otio timeline object + """ # get editorial sequence file into otio timeline object extension = os.path.splitext(sequence_path)[1] @@ -266,6 +326,17 @@ or updating already created. Publishing will create OTIO file. return otio.adapters.read_from_file(sequence_path, **kwargs) def _get_path_from_file_data(self, file_path_data): + """Converting creator path data to single path string + + Args: + file_path_data (FileDefItem): creator path data inputs + + Raises: + FileExistsError: in case nothing had been set + + Returns: + str: path string + """ # TODO: just temporarly solving only one media file if isinstance(file_path_data, list): file_path_data = file_path_data.pop() @@ -281,9 +352,17 @@ or updating already created. Publishing will create OTIO file. 
self, otio_timeline, media_path, - clip_instance_properties, + instance_data, family_presets ): + """Helping function fro creating clip instance + + Args: + otio_timeline (otio.Timeline): otio timeline object + media_path (str): media file path string + instance_data (dict): clip instance data + family_presets (list): list of dict settings subset presets + """ self.asset_name_check = [] tracks = otio_timeline.each_child( @@ -318,7 +397,7 @@ or updating already created. Publishing will create OTIO file. base_instance_data = self._get_base_instance_data( clip, - clip_instance_properties, + instance_data, track_start_frame ) @@ -348,6 +427,14 @@ or updating already created. Publishing will create OTIO file. self.log.debug(f"{pformat(dict(instance.data))}") def _restore_otio_source_range(self, otio_clip): + """Infusing source range. + + Otio clip is missing proper source clip range so + here we add them from from parent timeline frame range. + + Args: + otio_clip (otio.Clip): otio clip object + """ otio_clip.source_range = otio_clip.range_in_parent() def _create_otio_reference( @@ -356,6 +443,13 @@ or updating already created. Publishing will create OTIO file. media_path, media_data ): + """Creating otio reference at otio clip. + + Args: + otio_clip (otio.Clip): otio clip object + media_path (str): media file path string + media_data (dict): media metadata + """ start_frame = media_data["start_frame"] frame_duration = media_data["duration"] fps = media_data["fps"] @@ -374,12 +468,23 @@ or updating already created. Publishing will create OTIO file. otio_clip.media_reference = media_reference - def _get_media_source_metadata(self, full_input_path_single_file): + def _get_media_source_metadata(self, path): + """Get all available metadata from file + + Args: + path (str): media file path string + + Raises: + AssertionError: ffprobe couldn't read metadata + + Returns: + dict: media file metadata + """ return_data = {} try: media_data = get_ffprobe_data( - full_input_path_single_file, self.log + path, self.log ) self.log.debug(f"__ media_data: {pformat(media_data)}") @@ -408,44 +513,55 @@ or updating already created. Publishing will create OTIO file. except Exception as exc: raise AssertionError(( "FFprobe couldn't read information about input file: " - f"\"{full_input_path_single_file}\". Error message: {exc}" + f"\"{path}\". 
Error message: {exc}" )) return return_data def _make_subset_instance( self, - clip, - _fpreset, - future_instance_data, + otio_clip, + preset, + instance_data, parenting_data ): - family = _fpreset["family"] + """Making subset instance from input preset + + Args: + otio_clip (otio.Clip): otio clip object + preset (dict): sigle family preset + instance_data (dict): instance data + parenting_data (dict): shot instance parent data + + Returns: + CreatedInstance: creator instance object + """ + family = preset["family"] label = self._make_subset_naming( - _fpreset, - future_instance_data + preset, + instance_data ) - future_instance_data["label"] = label + instance_data["label"] = label # add file extension filter only if it is not shot family if family == "shot": - future_instance_data["otioClip"] = ( - otio.adapters.write_to_string(clip)) + instance_data["otioClip"] = ( + otio.adapters.write_to_string(otio_clip)) c_instance = self.create_context.creators[ "editorial_shot"].create( - future_instance_data) + instance_data) parenting_data.update({ "instance_label": label, "instance_id": c_instance.data["instance_id"] }) else: # add review family if defined - future_instance_data.update({ - "outputFileType": _fpreset["output_file_type"], + instance_data.update({ + "outputFileType": preset["output_file_type"], "parent_instance_id": parenting_data["instance_id"], "creator_attributes": { "parent_instance": parenting_data["instance_label"], - "add_review_family": _fpreset.get("review") + "add_review_family": preset.get("review") } }) @@ -453,24 +569,33 @@ or updating already created. Publishing will create OTIO file. editorial_clip_creator = self.create_context.creators[ creator_identifier] c_instance = editorial_clip_creator.create( - future_instance_data) + instance_data) return c_instance def _make_subset_naming( self, - _fpreset, - future_instance_data + preset, + instance_data ): - shot_name = future_instance_data["shotName"] - variant_name = future_instance_data["variant"] - family = _fpreset["family"] + """ Subset name maker + + Args: + preset (dict): single preset item + instance_data (dict): instance data + + Returns: + str: label string + """ + shot_name = instance_data["shotName"] + variant_name = instance_data["variant"] + family = preset["family"] # get variant name from preset or from inharitance - _variant_name = _fpreset.get("variant") or variant_name + _variant_name = preset.get("variant") or variant_name self.log.debug(f"__ family: {family}") - self.log.debug(f"__ _fpreset: {_fpreset}") + self.log.debug(f"__ preset: {preset}") # subset name subset_name = "{}{}".format( @@ -481,7 +606,7 @@ or updating already created. Publishing will create OTIO file. subset_name ) - future_instance_data.update({ + instance_data.update({ "family": family, "label": label, "variant": _variant_name, @@ -492,21 +617,31 @@ or updating already created. Publishing will create OTIO file. def _get_base_instance_data( self, - clip, - clip_instance_properties, + otio_clip, + instance_data, track_start_frame, ): + """ Factoring basic set of instance data. 
+ + Args: + otio_clip (otio.Clip): otio clip object + instance_data (dict): precreate instance data + track_start_frame (int): track start frame + + Returns: + dict: instance data + """ # get clip instance properties - parent_asset_name = clip_instance_properties["parent_asset_name"] - handle_start = clip_instance_properties["handle_start"] - handle_end = clip_instance_properties["handle_end"] - timeline_offset = clip_instance_properties["timeline_offset"] - workfile_start_frame = clip_instance_properties["workfile_start_frame"] - fps = clip_instance_properties["fps"] - variant_name = clip_instance_properties["variant"] + parent_asset_name = instance_data["parent_asset_name"] + handle_start = instance_data["handle_start"] + handle_end = instance_data["handle_end"] + timeline_offset = instance_data["timeline_offset"] + workfile_start_frame = instance_data["workfile_start_frame"] + fps = instance_data["fps"] + variant_name = instance_data["variant"] # basic unique asset name - clip_name = os.path.splitext(clip.name)[0].lower() + clip_name = os.path.splitext(otio_clip.name)[0].lower() project_doc = get_project(self.project_name) shot_name, shot_metadata = self._shot_metadata_solver.generate_data( @@ -529,7 +664,7 @@ or updating already created. Publishing will create OTIO file. self._validate_name_uniqueness(shot_name) timing_data = self._get_timing_data( - clip, + otio_clip, timeline_offset, track_start_frame, workfile_start_frame @@ -571,15 +706,26 @@ or updating already created. Publishing will create OTIO file. def _get_timing_data( self, - clip, + otio_clip, timeline_offset, track_start_frame, workfile_start_frame ): + """Returning available timing data + + Args: + otio_clip (otio.Clip): otio clip object + timeline_offset (int): offset value + track_start_frame (int): starting frame input + workfile_start_frame (int): start frame for shot's workfiles + + Returns: + dict: timing metadata + """ # frame ranges data - clip_in = clip.range_in_parent().start_time.value + clip_in = otio_clip.range_in_parent().start_time.value clip_in += track_start_frame - clip_out = clip.range_in_parent().end_time_inclusive().value + clip_out = otio_clip.range_in_parent().end_time_inclusive().value clip_out += track_start_frame self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") @@ -589,10 +735,10 @@ or updating already created. Publishing will create OTIO file. clip_in += timeline_offset clip_out += timeline_offset - clip_duration = clip.duration().value + clip_duration = otio_clip.duration().value self.log.info(f"clip duration: {clip_duration}") - source_in = clip.trimmed_range().start_time.value + source_in = otio_clip.trimmed_range().start_time.value source_out = source_in + clip_duration # define starting frame for future shot @@ -607,12 +753,20 @@ or updating already created. Publishing will create OTIO file. "frameEnd": int(frame_end), "clipIn": int(clip_in), "clipOut": int(clip_out), - "clipDuration": int(clip.duration().value), + "clipDuration": int(otio_clip.duration().value), "sourceIn": int(source_in), "sourceOut": int(source_out) } def _get_allowed_family_presets(self, pre_create_data): + """ Filter out allowed family presets. + + Args: + pre_create_data (dict): precreate attributes inputs + + Returns: + list: lit of dict with preset items + """ self.log.debug(f"__ pre_create_data: {pre_create_data}") return [ {"family": "shot"}, @@ -622,41 +776,73 @@ or updating already created. Publishing will create OTIO file. 
] ] - def _validate_clip_for_processing(self, clip): - if clip.name is None: + def _validate_clip_for_processing(self, otio_clip): + """Validate otio clip attribues + + Args: + otio_clip (otio.Clip): otio clip object + + Returns: + bool: True if all passing conditions + """ + if otio_clip.name is None: return False - if isinstance(clip, otio.schema.Gap): + if isinstance(otio_clip, otio.schema.Gap): return False # skip all generators like black empty if isinstance( - clip.media_reference, + otio_clip.media_reference, otio.schema.GeneratorReference): return False # Transitions are ignored, because Clips have the full frame # range. - if isinstance(clip, otio.schema.Transition): + if isinstance(otio_clip, otio.schema.Transition): return False return True def _validate_name_uniqueness(self, name): + """ Validating name uniqueness. + + In context of other clip names in sequence file. + + Args: + name (str): shot name string + """ if name not in self.asset_name_check: self.asset_name_check.append(name) else: - self.log.warning(f"duplicate shot name: {name}") + self.log.warning( + f"Duplicate shot name: {name}! " + "Please check names in the input sequence files." + ) - def _create_instance(self, family, subset_name, data): + def _create_instance(self, family, subset_name, instance_data): + """ CreatedInstance object creator + + Args: + family (str): family name + subset_name (str): subset name + instance_data (dict): instance data + """ # Create new instance - new_instance = CreatedInstance(family, subset_name, data, self) + new_instance = CreatedInstance( + family, subset_name, instance_data, self + ) # Host implementation of storing metadata about instance HostContext.add_instance(new_instance.data_to_store()) # Add instance to current context self._add_instance_to_context(new_instance) def get_pre_create_attr_defs(self): + """ Creating pre-create attributes at creator plugin. 
+ + Returns: + list: list of attribute object instances + """ # Use same attributes as for instance attrobites attr_defs = [ FileDef( From 2acf9289a14da87faabc79180c5c7a53d4361000 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 14:00:47 +0200 Subject: [PATCH 482/785] global: change reading from instance rather then context --- openpype/plugins/publish/validate_asset_docs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index dbec9edd7b..9a1ca5b8de 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,7 +24,7 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") - elif instance.context.data.get("newAssetPublishing"): + elif instance.data.get("newAssetPublishing"): # skip if it is editorial self.log.info("Editorial instance is no need to check...") From f5f7e52c42c9a43a4746683ba7cc0904fadab661 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 22 Jul 2022 14:01:48 +0200 Subject: [PATCH 483/785] Update openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/collect_clip_instances.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py index ca269a9c27..bdf7c05f3d 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py @@ -13,13 +13,11 @@ class CollectClipInstance(pyblish.api.InstancePlugin): def process(self, instance): creator_identifier = instance.data["creator_identifier"] - if ( - creator_identifier not in [ - "editorial_plate", - "editorial_audio", - "editorial_review" - ] - ): + if creator_identifier not in [ + "editorial_plate", + "editorial_audio", + "editorial_review" + ]: return instance.data["families"].append("clip") From 409cd5b870b9ebf7acc70c752c5b900a72ee9fd3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 14:07:44 +0200 Subject: [PATCH 484/785] trayp: processing PR suggestion --- .../plugins/publish/collect_editorial_reviewable.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py index 34f7a9ead8..4af4fb94e9 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py @@ -2,7 +2,9 @@ import pyblish.api class CollectEditorialReviewable(pyblish.api.InstancePlugin): - """Collect reviwiewable toggle to instance and representation data + """ Collect review input from user. + + Adds the input to instance data. 
""" label = "Collect Editorial Reviewable" @@ -13,7 +15,11 @@ class CollectEditorialReviewable(pyblish.api.InstancePlugin): def process(self, instance): creator_identifier = instance.data["creator_identifier"] - if "editorial" not in creator_identifier: + if creator_identifier not in [ + "editorial_plate", + "editorial_audio", + "editorial_review" + ]: return creator_attributes = instance.data["creator_attributes"] From fcbf46d345bcef7363bc4f590d476136f478b6ce Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Jul 2022 14:20:56 +0200 Subject: [PATCH 485/785] Add normaal animation --- website/src/css/custom.css | 4 ++-- website/src/pages/index.js | 16 ++++++++++------ website/static/img/logo_normaal.png | Bin 0 -> 13468 bytes 3 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 website/static/img/logo_normaal.png diff --git a/website/src/css/custom.css b/website/src/css/custom.css index e8dd86256b..58c9305bc7 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -196,12 +196,12 @@ html[data-theme='dark'] .header-github-link::before { padding: 20px } -.showcase .client { +.showcase .studio { display: flex; justify-content: space-between; } -.showcase .client img { +.showcase .studio img { max-height: 110px; padding: 20px; max-width: 160px; diff --git a/website/src/pages/index.js b/website/src/pages/index.js index ae7119e928..52302ec285 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -65,13 +65,17 @@ const collab = [ image: '/img/clothcat.png', infoLink: 'https://www.clothcatanimation.com/' }, { - title: 'Ellipse Studio', - image: '/img/ellipse-studio.png', - infoLink: 'http://www.dargaudmedia.com' + title: 'Ellipse Animation', + image: '/img/ellipse_animation.svg', + infoLink: 'http://www.ellipseanimation.com' }, { title: 'J Cube Inc', image: '/img/jcube_logo_bw.png', infoLink: 'https://j-cube.jp' + }, { + title: 'Normaal Animation', + image: '/img/logo_normaal.png', + infoLink: 'https://j-cube.jp' } ]; @@ -191,10 +195,10 @@ function Service({imageUrl, title, description}) { ); } -function Client({title, image, infoLink}) { +function Studio({title, image, infoLink}) { const imgUrl = useBaseUrl(image); return ( - + ); @@ -490,7 +494,7 @@ function Home() {

Studios using openPype

{studios.map((props, idx) => ( - + ))}
diff --git a/website/static/img/logo_normaal.png b/website/static/img/logo_normaal.png new file mode 100644 index 0000000000000000000000000000000000000000..711847c9f2f95d77d46d4ab98a9287e5f0ef771d GIT binary patch literal 13468 zcmaiacOcd8-}lD~$yS7f_}ZIr%*x&?WE`tQ_BzHfLMf{wo62dJsmzS*O(EHY%pfrZalNk-W1y!_d6xMs1VNOV8Y)H*M7#}tA0|5k{uLju zNCSVUb~Z6bo9pTz96a5H?QVP8ql5$9y}&vI$twkV**UnP(CqdoCua`@PJC@6C%f}) z1x_<5T~S@H8>liwIQUJf({O zzr%kP;bcGEf_7ElG}kp?zv1bFVwVz@5*FoDJj*WcbK4PNq@wnZ!{Czw=N&ZK3n3yB z5D*|7AR+AO<0K*`D=RA^DlQ@}E(CT6`C>fKc7Z}3zE@xpf74Jw`8xPGd!d~@J=kHI zcJ`irXa!DAu+IJu;%H~be^dAH{fGL%5F+r3h?uab$bW7}1v>vP+u@b}w%zNFC)(5Z zj;GhZ6!*?p?fKv5Gd7!VHS^~BULet&O38jPba6;cv;1m~=_`g=noNvPmV&Ye&|Gfg! za`S_oanr*Ya2NmglKH>a6gZ{D0SsUlopQSMKQ6)6Mi@8;qTI|?oZV3#zJTg~@cZ}j zt^Zj5r<|v&H$2@ueT)IPQQ+JuyMK8Dak{4Kc^fSKt=YeqG;iE6@bPqXb^~j^M(Wqu zHC1n16_dSsRY+X;Z!*Aj2yoWf5u;*<2D`*X0YXKkg~Y{8#KaMzVhAw_0a5VF-#VV| z1`%=F4sG}U+YjTKT^^j!)kSDH`=UL4FsFa|8|=m?@4r9&{n5?&6y@yfr@ltmIlzWj z;PmqGyzS?Jx_zoKAousGucsqAz|IGC%?Y?!f%BT9qcd<3h8+Zuvy%tPhh0oqO!Oc6 z{p(P`9TXt=zpE$m58*`M(EF!!@*@9De)-c4{}HR;+~0o#IRGRRk$+_q@Zn$i2;~8i zkPpZ4)UJFh5e7;;mX89@ZS-@9hNq_vMWh`F}LRNt}H!pO`L2SNo^xe(F$;OfLl)m#N zl4q#}-k8#`(LXmAdi@uktcq`CENSCOiFz`{+36=ib#29eUDPOX9XGZR-Cs?N=? zeH4xDG}t3fuhzPIOmjv_W%|vXbs{g1rh}WRarl<@ztHk;#Dsnc498d|YX-_aSd_g} zPjGm>z-b{=kBz-X*v5}C_^|KF5-v{Z+LY5UVb5h}P&UZDG;aAq5CuK_LjXO^WP%`e zNK@sSNnqyBZ$DEf)G8}ymwL8NTBYwrT=CK;et#)qf;Ji@?#_#`R?4I2|KC-(-+f4l03M- zQdFe8Nk(5;N!}{TZoE7-zc{PV|6FgNqC?C}U`21c<6C-Eqwn4Zvt|3yXAW%P$kzsS zYsHnd>8{~>evf?I79^$`K#NB(M%Y^bV-^6`dz% zJ=#CK!F}>PWDAugmayEEi)k_G4Lsq}b8#7Po>U5c{I<1n#cxDEj-lvg@2_aF^Al?G z)i3vCp43e8bsU7Qd3={v^JpveqbpuGxqo6g|H-J}D*yc1k33?_u4RU@X0Fs`T4S3d zxhX$`g}OP$Zak^nx_qTG&As9Aqg1O;VM?KH)t?Vuc_(T#cW?h;U1CO68(my_<(n>I ze`3a8hSf^(&#LL{tYf9@Gf)WX$i_0H>6TbCKr1p`*h$SRYI0?cugnaxRlh|ta^q14Y;}^2sg&BWcNcHMRM>(uF+1w59ng*;xBb z4GD!Z*3qrY)O=~r+SW+AQy6Llt+nJEJuiM$<5uiGiIc0GTsZ2Jcyct_SXI|$ zoasV4dwXUQKaf;(@w>_DulkV(!ITHF;!{cs76cy`O^@dtSFgVAet%ZHxR3 zT!yQ_iCBV5wRWvbNy4Wv!AuLmiTt8I(_@=4ZEe22pEb!RHMZGI#^o%-y%z)0Jft}8 z0~2Vz?dC~+f8wHAE}Y?>RuRsMICz{c+g&T@O5i)>9~?JP{+3iCH0^{*quMLvJ6`X2 zerCugebd)0zwK97em8K8;w_$&T`!Ln@lb!OMKwR`CkmC5F`H&@2CDEvg3?P3TwL_- zbz!~lnG>1;KMDtdH0C@NMeBwfYlVzit>fvi)dv(*4S;QguXg&%d$atVm$Pw4_-o~< zCqXh*ZW5vXf(~!+{NvRNYR8Q?Lkf3(1`BlK_TArc%mq#_n$v2$Nj{OeRfb+MBXQg~ zKK2z%^C7V-V>t-hD`51HnuV?6FqsUk`!xM&+7fC)ZsS4RX)N@F{%v+u@$kdznyia5I??wdns22z2evF2XTLmR;Heg9X}W*Y+w;I zbl^!<*`J%$607aa7X_S_vm!Bc-!Qy`YV+9A>cq7wDqQTM6{!Iop@RHq%z81M-&}CB zc!kHb^qal+saYvQjo2tlmHdrs&F3id`(@6yw zY5u4hG2I?=?4a{{rC~RvMoh@tnc9&*Tz2K@!*$ge*C^%U#biaD(HMu!B=p>2llY>w zvLo8Kup6+5*3+N4p4cL<`MbRqQl5{GO5Qj?x}MvG9#KWGv3tuUCiSI8<+`lrB&|}V zYYi402kQg-D&OvJkvIOsB9($=$BSFiE)(cm#}wUsRMZxuOOEJIjamRU>p^&*t4q63 zGvY$s81XLa0fvs$eB(w!dWR`-P?K>nw{E~LA zy)4>}&Dec5f?cGrkZdPB-=qB($$Sh}cC@cd&q}hxM+j}C=^m-W4eV;#SAd0BKXTe< zjK)x%c^Ahryi|IgN4Ke24NYvB`niyHz7K6Ef8s9ctU=sDUx@ubEhd2Ool}Dv!0P2)L$@L87-mRqadQvalh0y zFGD|XtBV_(o(INn>LRnsQ$@~e15mI50?OAK1zKn6>3f&OSx@Ot$O`$R0fGr};km$?BA_0M+rh;`f_mgU~=LV9b=2dg-o3Ef1VmBrdt3X^sdXFo9G zebJou(1|1KI2@vccCc40%UAI7h>#KROJ+F}qOiG+!mGsa*`=5!RTr}D43{_)WZWf9 zzU5w`oELQz%CtJ}&nFi~VklZPvWNNZu}b8RZtb8t=tYew?WR)qH|Kp=ydN}h;c{4( zm?2N};%xXI6Spw9~XqT#FE&JLOi|&Xt*eb@_8J*Y`?-(puq;@$i^%a7iT8QDkUK!dvT_bn* z>$SqL;~@*=3!KagQpW=Bo_V#|55(4(g)BMhLe~R+g$75?N!?e#BpUgbax zO{X3*($3)Nzx4pM+$F&B4Rgo3+iYc12+<|niBQo3vPuBoaDwO~Ad8B)=yG%c&jIQzQ%^iXhGcp`M8##&822w6EG`asUA^()LD5 z4E=L%gm-3iIUspmR7#u@NJ9f-P}Ln|4c6Coh&;~@Z=ri;<#9!%^6F7vL*7Z`s6|VO z1Au#R3r}8xlOIC`2xJ1vyjk*(n;W47O&{CHzp&yyjK%GI=o250rYK6c7gVsX5fe4U 
z+C7iCbhcN*n?#WLUJw+hlK&O`P9R5(XLu9{FtfZBa(jvdG8|%B)U*n1$4uTAVtZ~* z2|d=y+|lWenZKR`f{Njx=qjA9u3gPCrp?G15q(U>mTBCb@;O)&&;3VmMP2hmf8a#p z_h@TlW8#kCz|`BOHsH(KRiiR_FXEScwF~Dj$wbA!-wt$9@Ui8e86~qxb9lN{#iZY) z5@uGDbI#S{?7sX`+OxgD5gmiC`kz{m-o1#(%QRXhr? zjh(Ke;2m?N6=hdI6vRv53Ge-`3Lqc>d&0?FsJ4sR<1cio;+QwJ=F#Kd9u#jQ=gZp; zwJCg>LM2yskPiE4YZck?0k&u9c@#U{+88Xq+W}L?&c8>Qdrq&mJz$!7_q0`>&!6qWW{!nD(Dy7)4vv3nhd$`brr{SGG~)Y2jvocsA0zD zlbyVZ0!U>yn`>#61yN;CX4)d3H_rKV`qGpyPfcoHJdO$f+PM202ED)`4MNY0rM*_E zBAu7=%yY6Y5sc+@i`Fref2o1`K)knAd+p?f9$wCD$r^e?g94&u*#B5F$`+um?$?yU zf)0k*IOz)jMtD*M-J&Mz3UfWRWzNWM1V0&%6<`ACql73Co`uctyQz^{aODe69h!ty9ceLXgq;TC zg<$iLDE^LE&je`6AgH|AWa-ltYm9j^upA9RkN3OugXJA!ctdiI4ak@fWUh%l)k+9x zWdMF&k>xGI_NA*}UVH*7C_tI9S*)qqDK*)aeeF2=!?ev2xD7MFbFJE0ky6 zsfb=^zK>xvAcu*&RqHE)pdObL*A7x&8j8$!TK)vC*| zNJwCm*mAa}^5X;JtU>y~4IIc0)k!;=1L~S#n!m5fuK-&Qb+e^4SHuo3cim?M!ioR2 zGF3~aPHeCOP^Qec+;ccm(IGHQi~$(*=Z0-XDgykDvY>tssCU7e$jRV=^*D-uyA3uI z?A5Mv`OoF_jNC@97Z_!y-3Cl}U?$&M2ROn1*9!xJ2$N2aTR8ErMJ9ve zt11bZz)~}jGkgj#ehKW@0ClZX#5@NqwJH#PPxxM~xDR;%JlI46!64C8T6ccn!#>xKNhIio45+CdB8q9iX^RoiB{6h-_ifQN zW7ATu(nl6Pumr{dZ9UXP59EFcQXOQ#{ut(OaBrj~uL7n4aw4$5~(!g2Ercp!v-2_!S^ZDvjg4e5L z#)B9M7<$TkW9Y0gyd*f2wL9G3VJ^TJ5b|_S7Co7aCw5)_ZXBKQHDQu^3QlNi$z5teIxB8o+Ahalv zt8fbhb1p~7Ewj&SwToX&`SYZ*HK&0SpgU8ZNoO?oE+P-L9HKj$rM#ZB;w4D{`=n@` zNieptvytnMvBTKZLF)TxzHe7^xj_9O++%KxI~g3yb$Q+{XbVimwhSy0IH6IKjK*z9 z$zeIBc-rMOiL`EO`L=*gQ_EqBq-O^?u$OdqB=GW>8THArn!`uN2N=c#%^bc7IZY+4 z-{~EjzS|r4Y3<+3ro$m-bA+_aD!_QW&1&<;jw0l{RL1^F8o%FdvSFm6(LN7raz6yH z0iWsSP$(9o5#_;gkkkMA9Xj_SxR}Xq zYIe_LkVF7nv$Ak`?wxzgMBk;@O#BOYU&$w&8+0Rpct??u7^xMhskmh?{Vbf57{!w` zT#4Fx*I{}?LovfDfMJj5E}LU;JNZX|-nJ}aE#LL1J*P_k0f5r*3OEo9ikg8WyT3ju z;a~g-=F+aYo8`Z-`y0W8DbC;!jE@^|mX*$oqg#a0W^dRrXd`j#mQu>qe(OY;4#r)I zK-YP1o%g}N@P86dE<`vZ8C5_F=@7Bz@sl$_H%0G_-QtTA{dh19#}m@6GAtd}w?0zR z&gU`U1t^2Mq@4KA*}J@w6Y(*d>a9NW9R(&JlKIJ6v4g|k`1BzE_)~vuSGnHU;@^s< z>m|~J6M#Y9Ag;yko4N>VTzt3rRw%6OE>s6nupR;--N)3gV{ZKJO#sy)iQ>fN!q zo`Gx}mn#rH2xikMT@CF8t?Hc*Gx%e5G2sDF9-1sN94R2>TSy>+bfm^{P=D$uA<*oh zHsZ%a6V=;}QzTZrT`51c^N`d9u^)eEiQxy&_Q082S$0aGmq21*O*P>=R=I+*B4FW? zO0_34Mf3*)Y?|47jV!3~N`|F9N~=g%I`PRkc7jI)`t@YSmbBdRHIk3w=$cfhY&eJS zt^_g}F_Z$nkU0@tP>z2w=Z~(}Q$*AviM5bqMiT&|I+8Lce=1aJqGo+Tc2L=?Ibyo6 zr~i^2hS&b#EpG&G!sS;3zk_JADnW71m>u}jqS);Et{u!Bc(uGF8Mc^R0l31YU}Vg! 
zrOvUkJ6x(KOB+!OACKp~WXp#rpem+s2DeW^29*4t)ETL)N8Eh9dxV0pvf+>GJL2o6 zs;N?9yqp#4$@DD;jCfKRoLq9iq(ynJL`9Sk!*Myz=IaZohY7XPO^<=u1 z1AKb6l3YPJlR>{x8tyv^(c)5p8v7Q82!(7cx9wFJY$K)VJ8BN<$P{{k{Z-Yz6qX2Q zOPIF5*5Slux&~9d{+a^#v(Xu^8!e+<1;&{(Z04uVI8t=^(GiTdCTrnv>&j=eb9T)_ z8|AJ~>PexbL0@nC5Icts%VpJODX7M^lD%j(O>-h95fpJ?h7s z(SRF15xzyMm^&EfRjvx}xRE!C1(15e3?$YW@(~qmKf4)m>r{@;w!xtUH4(zsCS+<8 zLq0q=dq;IY1L4_?lqHFSzYW1MFrLcrn>`Q1PI>)x_bw-cdn%k*Xnyz^$9Z|)fKeU1 zP&=%i;w7UMP5pNiAkC5#`Ffj`x`yM}SB;3X-1!w)gZM|$WA$@xN?IokSvn-WvK5G~ z@}C2pbR|qMx)1!4)>YEkgw>fUtcqpq&cJE*udc5E9xJ^9_0K?mo5Aa}H<_?mVI14H zLN0h60I(^jh&$9Ylb#?YpD|*{i1^h{zz4x3-gq~qAQ9+JfoQL0Tn9(?Vh3?>^C5eR z#3F2maDAcgrRX>Vmj-0yOQE=dC);L=qmStt(EL?#3SH(F4i1opyw-6Z0zffM>xItR5Rnu zw~I+T77lyO0{H7Ry_VPr(cU%H{jcDn1M1Sq#r*pnmWnCs(@Ba!jNXy3Rhh3%l(Yor z^eG6+jq-=Loz}d4-W9B{(%7n`p533D3wnbXm$%+7!@)}k8f}pk>Gl2Chl@f6uKP zWm^5CE#@&0B2e>ARA1!Y8(%W{iAuvvy*IDQ_B6q8Ft7u7?2h?n{RdCB892IToQvhJ zO&Nnu#N;4iN^O#ea+}r^HxgI!-p}x!yz{rvp^~{^YmlV^|I!K=eM(@t(rHSXSV_{v zb=srVk6_BCkYa=igZl>OY~LV>P4j8ca8A1e#tK@~jBF|Xjjg;9G|d~{pn**Q8iR)I zg+Rk7N1x>t`MNgGSZ@jEl>2?Fmi1(^*LZI7;j2J}*=S3h`|&KQYuXcxWFZYcaCa@< z4Bsu3Q6>s!KAk(v-@mF=N&5|xk&Wh_#K zXIS=rYp}4PmNXoTiR$T-GaJvfQ-)T^tpv3)_6{jd9ur-Knylnf!^D;u!90ViQzi0g zpotb4x_4%$!7xwlX#hb|O%(UN=2?Y*21kJk`L@5vQDboX*Fex-lrFYPr+qNWOeRfv z#dz&P#9vfd=$WGhaJu&e;$YgJv&(!dQ}Gf?UiFDEW#)PA9=WC5UIhq>=c|{{Pb#XX zP?iyjx>ZjmAqH9(`;UMvaxgeubZIZYds`kn{3+cTP>RGFYM;5#)6dY#7{@1m2K>w~I z8OXT7t_98eU2fxT)+e8ypRsN_2SDysFy2yEA6N#i&*4LAeZ$vnV=&7(PxDg{y<$;& zbw-(%*5{^317vIuyETBs~elS4n&xM9F&7 zHDfdwxB@bWM(&c>NS#Ufz=|^g@3Cj<+e|}>*X?3#H-u+m>bqE`^WnCP+l6x5AB+gW z2~w#CHub9%Jc$oM030>(z5!oF+kn3dVCX!1D`g{hFlg{~HrhQ^Mi;F8wuSUFP}~y)56m)5Re1UsHa>oeE}XKp!C<^S?0V` zKC%3;Y-3o;eMYEBUMkYIewCa%@dC~yJ!U|+Vr0)v0OZHxE#aAHFhz(#v}0_kHn3m{ z8xy?5kq5?WUkeD7aY3mUIeQFmu*b+}Sv_K{qmbdyk{-60_D%5$1S&@3<|Kz<4UH4oY;XLNhGjm|SXY z1d-msr_K7$oApa$@D20YZfq?jmQ{tbk(cqx$!KjjJ(wXnmdq5tFj}&1A~$sWt@70a zng$cwJ|s=Yf0fY4bN~ZEi1>mS!&-ag<$O>KB&^={yqpKw#PQ>$CY7MB zm66);D@6=ET<|b07fezW`Fk@%XJ*Wz_I{Ww5)eS_g~-i&rO^ajFcNzDG=r9`+axU8 zEuyOP)hrfA^;e`*Z{pPk3Ik{*0Qn|5a4RmkSKp!Rxn@P$*Zwou-`==LzJxlc*)St_altiLJehQ?1l|4w3!%los zT9|0tO71AV{5?S#Fle;I;et_+BrhwAdq>ybZ zA>A2~Me>>SD6MH@T-RD(oPD zLju@HSh?bGKIQ&_ce|kkwaA2bjMkXITWxy$R=wi&9_7ONR^XlirQPA~N$yyew*?;1 zsp<-QuyNlW?Z^rm6J_T5u4xf9GS*q}EEpbV_39Za$U4O}`@^Ltcg(fzrqsdwHxf6e zJ6LD@{8EpaLxK}Lly3wy!(&re({}ll+#;3oK34FJmU^ep`WY&=$VUzuWmJs@8ju07 z^pq!pDcV|O@mJ?_7yZ4n&y`qd3-JGZv+xmEr9qS!CYH=18d$FjjvpA9v^RK4UCvsJ-xA~RGF z1&v`H#oQ!zYdy{G9Q6HvH$R&8(N5X8TW94O|W4}Vf5uLJJVO{wR9TSTu+$12=^tCr^<_AD4X zzp0O7N*D59ykvTITrJ8>elN?I2=d{}l`CeP2-4{+*D$v$zW-tD`Ae>-iQA!lfo}za zk4Y`gZF8|jMu_97Bhxo)Ny6)WL2n-pBi=h&#;`KV@k7P0G zy{dbX552&HQ?q#s!C)b6bK#JGMiu?wFN2QH4Vz`S{X)W~r}8ym7~0B;Hk#$D-M|kn z#1H`)GF~Q^yMy6%U>0A89)y1uI*&px5H!_{tb!5~GuRYu#8_)Q5d?rHRF z{zz*bl32EIyWy)evT}5cgAg(wJslR>3Fo!U7HGGmGYp~)!No(xwlE4qx7T|g4+}Bf z9zX=Kjns?{#}&!8=(i7Z9M8l5dprf|lcn`7&Wbkz88>Su@AH0}Cu~((T4>+{Xzm5Po zKJ!M#-ZE0Bv~9VNi2{j?;lo1)St+OQ9dJjh`b0~t#v#0 zHF8i2|IGlT2w)G2ZeYyk{r6^bF}Z>8Yj<6wDBhKrT;>Dg-WN;yH5RBVJqsS7T7iaJ zx&LZXO-xu?aMmoVFx$&JkrB7H)jG@gJxcF%(CBXELtpXLL;Jb&X`y_^OQWO2`% zR)NS;(Qi8f%hPwVndqzQ=msYU*pupWUPv`Q`<*(LJ|~VgvOD{1!->pU6!gMS=JkvK zR_c}}V{Lt_jWVQ;!zQI!W2M7aFVa}K4k(kb1 zfF>DVJu5Gy$aAK1P~tc3-^lr)uMUSDt)bB;-0^R{WDy0Toz;9_b-s3(JUY*?1`^5H zicy(e{CF)5!vf9nw-^>tn=Hm_Y|xOc$jYJ}W{OhcoedQ~=qd$80~r$%K%bJlvaox( zaKa@&$iukA@Ll+i63^^cMC}|sazCkQ7iTSrgVI-26G$Pcxoc`4`Hc3r4Cjf4{CoX! 
zKXsR}$t%&Ozzus%7IkstDnn5@H!`C(bEgDgLRo!*x zfh16dNSEfx%T2{@xoPpF3D6Y;o91wm;b-8jCM4N2^nui<4Z``H%un63vDpm#I+puK>nrw(M>$V%fsawsdu z;?lJr*Z>ZatYBL3hGxGZY|9Pz#<-d)zm#C)NoQ^Nd2kd;q+P@@F2oXq)Q^ruRQeWW zGk8iBJ?7nO0o~jO6#+((lP%$ww1toIO%-_(SnZ}T^<-S4pX)oIw;e3QdKH5*yZaqg zaOZooqEC~%U$^2RZrqs$bjDED@2D>7H=yxhn7?ZSoXWnk>1?C)rd~kTQ%djcrOdUa zZ1BZ7?jtp=*Rz@6G3uJ=RB0OcpZ-T3g~sch>0iyDoz?P>6>IR9I0@DSgiIMYh#!|x zK6L>NRMObVjRj9ZEt!4n#_c!FwNMDEZ!C9DZTBbioJI0spmYQ3v~83C=S(^j7c)T% z$qs(+1)?d7=e`J8HyQmjUf-pZ7Hi<31zYAE&vEcdqYa-L7VZFbU}%la07gt)&H^j= zd+A1Om+$hN_<6fjckfqwS7SR~uEia7YOQ)ZeHx+j`H*%ZgK@Sy?ER&{-61zpX884R z=b77!$uDvT#iJu52ijWGQ&aT@fWvg-`6Z4^1>n1th%o2s-o5^Blb^!s%d9HhsBIRP z2xDa{Ckt5rI@$GhDRq0n%;)#!%1|x6gD_nse(Cgklery4A2XTo=H1w%&f(I9xQ==3 zIC{&bN~Camq=ixQH`j?@m`kZd#WaJ#e8IxEC*!WSw`}nFXpfxt>(eyr&-d@sovhYEZ13y&-LRg4K2R^=_FXLGUe(NP`GRI zabxenE8Mq_VSg$sdu@EU0VlU7Wq85ey~A7XXIcjWpU>7GKNd>cln55s97{*deh9Hv zHq=l5xlKq>VtC8!m14m+%DX~EH$e##?LfwPU-)Kv>=5%)Zep*(ZQH}klikU2MA#?ORByxk@@dZ)M#QMexcAufo3In>IfRpJV`g2riPp<1 zg`2hnS{HXRmxoc=-jmZ^CrxuSGkzD9L`JGoPyFNl$wMIx@KEE6XmVetwn(r%D8|Fi zsoXjdJ{~Cxb>eHB9=Tt4-zdKI*i`NipEM%deX!uh^WjtR&wV}btGA`M4(UA0!VrNH z*y(ND*5%tF1A$@jg=91?PWFe2?bwhi{1MflsMu6#JoYG0AkaBWUrBSZ%rcg4!>Fx5yi-ZM z0G6>+QQ*XXUe790Zu0)V-odE!TFBFicoC|@UY6X6fTDxLKkvN5npIy6jvR%C^~-ov mxrUV7^Rp0{u+ Date: Fri, 22 Jul 2022 14:21:06 +0200 Subject: [PATCH 486/785] update Ellipse animation after re-brand --- website/static/img/ellipse_animation.svg | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 website/static/img/ellipse_animation.svg diff --git a/website/static/img/ellipse_animation.svg b/website/static/img/ellipse_animation.svg new file mode 100644 index 0000000000..c1caaa6726 --- /dev/null +++ b/website/static/img/ellipse_animation.svg @@ -0,0 +1,9 @@ + + + + + + + + + From abfe580eeed15293d929cce4170bb41862a33868 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 14:27:32 +0200 Subject: [PATCH 487/785] trayp: adding docstrings --- .../plugins/publish/collect_shot_instances.py | 60 ++++++++++++++++--- 1 file changed, 52 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index e6f1173bc4..716f73022e 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -4,7 +4,11 @@ import opentimelineio as otio class CollectShotInstance(pyblish.api.InstancePlugin): - """Collect shot instances and resolve its parent""" + """ Collect shot instances + + Resolving its user inputs from creator attributes + to instance data. + """ label = "Collect Shot Instances" order = pyblish.api.CollectorOrder - 0.09 @@ -50,6 +54,19 @@ class CollectShotInstance(pyblish.api.InstancePlugin): self.log.debug(pformat(instance.data)) def _get_otio_clip(self, instance): + """ Converts otio string data. + + Convert them to proper otio object + and finds its equivalent at otio timeline. + This process is a hack to support also + resolving parent range. 
+ + Args: + instance (obj): publishing instance + + Returns: + otio.Clip: otio clip object + """ context = instance.context # convert otio clip from string to object otio_clip_string = instance.data.pop("otioClip") @@ -63,8 +80,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): descended_from_type=otio.schema.Clip) if clip.name == otio_clip.name ] - self.log.debug(otio_timeline.each_child( - descended_from_type=otio.schema.Clip)) otio_clip = clips.pop() self.log.debug(f"__ otioclip.parent: {otio_clip.parent}") @@ -72,6 +87,14 @@ class CollectShotInstance(pyblish.api.InstancePlugin): return otio_clip def _distribute_shared_data(self, instance): + """ Distribute all defined keys. + + All data are shared between all related + instances in context. + + Args: + instance (obj): publishing instance + """ context = instance.context instance_id = instance.data["instance_id"] @@ -85,6 +108,14 @@ class CollectShotInstance(pyblish.api.InstancePlugin): } def _solve_inputs_to_data(self, instance): + """ Resolve all user inputs into instance data. + + Args: + instance (obj): publishing instance + + Returns: + dict: instance data updating data + """ _cr_attrs = instance.data["creator_attributes"] workfile_start_frame = _cr_attrs["workfile_start_frame"] frame_start = _cr_attrs["frameStart"] @@ -107,6 +138,11 @@ class CollectShotInstance(pyblish.api.InstancePlugin): } def _solve_hierarchy_context(self, instance): + """ Adding hierarchy data to context shared data. + + Args: + instance (obj): publishing instance + """ context = instance.context final_context = ( @@ -157,13 +193,21 @@ class CollectShotInstance(pyblish.api.InstancePlugin): self.log.debug(pformat(final_context)) def _update_dict(self, ex_dict, new_dict): + """ Recursion function + + Updating nested data with another nested data. + + Args: + ex_dict (dict): nested data + new_dict (dict): nested data + + Returns: + dict: updated nested data + """ for key in ex_dict: if key in new_dict and isinstance(ex_dict[key], dict): new_dict[key] = self._update_dict(ex_dict[key], new_dict[key]) - else: - if ex_dict.get(key) and new_dict.get(key): - continue - else: - new_dict[key] = ex_dict[key] + elif not ex_dict.get(key) or not new_dict.get(key): + new_dict[key] = ex_dict[key] return new_dict From 69246a76b4d6e494c563c828bdfb203fd0e80c44 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Jul 2022 15:01:31 +0200 Subject: [PATCH 488/785] fixing the host condition --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index cfaff4067b..1ddb694f85 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -229,7 +229,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): skip = False for item in self.skip_host_families: - if item["host"] != host_name: + if host_name not in item["host"]: continue families = set(item["families"]) From abc5c9e69b6cbdd0627d229e7e8294c159cef0e0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 15:37:10 +0200 Subject: [PATCH 489/785] adding codec args for keeping continuity even wtih audio stream. 
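The concat step that joins the rendered slate with the review movie previously received only the format arguments; without the matching codec arguments the audio stream of the joined output can lose continuity. Roughly, the command assembly follows the sketch below (a simplified illustration, with placeholder names rather than the plugin's actual variables):

    def build_concat_args(input_args, output_path, format_args, codec_args):
        """Assemble ffmpeg arguments for joining a slate with a review clip.

        Applying format *and* codec arguments to the joined output keeps the
        video and audio streams encoded consistently across the cut.
        """
        cmd = ["ffmpeg", "-y"]
        cmd.extend(input_args)    # e.g. ["-i", "slate.mov", "-i", "review.mov"]
        cmd.extend(format_args)   # container / pixel format settings
        cmd.extend(codec_args)    # video and audio codec settings
        cmd.append(output_path)
        return cmd
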
--- openpype/plugins/publish/extract_review_slate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 90dad00b97..69043ee261 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -321,6 +321,9 @@ class ExtractReviewSlate(openpype.api.Extractor): if format_args: concat_args.extend(format_args) + if codec_args: + concat_args.extend(codec_args) + # Use arguments from ffmpeg preset source_ffmpeg_cmd = repre.get("ffmpeg_cmd") if source_ffmpeg_cmd: From f0ca08b4959dde095b5ae4599cdee76fd8ac86f2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 16:30:50 +0200 Subject: [PATCH 490/785] nuke: no need to remove slate frame collection is already without it.. --- openpype/hosts/nuke/api/plugin.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 925cab0bef..37ce03dc55 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -181,8 +181,6 @@ class ExporterReview(object): # get first and last frame self.first_frame = min(self.collection.indexes) self.last_frame = max(self.collection.indexes) - if "slate" in self.instance.data["families"]: - self.first_frame += 1 else: self.fname = os.path.basename(self.path_in) self.fhead = os.path.splitext(self.fname)[0] + "." From 0aeb10b78d204e6e3778e8f7dc1078fe9bad6068 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Jul 2022 16:31:12 +0200 Subject: [PATCH 491/785] nuke: no need to convert to int if it already is int --- openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index af5e8e9d27..5f7b1f3806 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -98,7 +98,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): self.log.error(msg) raise ValidationException(msg) - collected_frames_len = int(len(collection.indexes)) + collected_frames_len = len(collection.indexes) coll_start = min(collection.indexes) coll_end = max(collection.indexes) From 4ac8da4ca047363005b1f0638c29584f847c8590 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Jul 2022 18:14:26 +0200 Subject: [PATCH 492/785] remove hosts filter on integrator plugins --- openpype/plugins/publish/integrate.py | 4 ---- openpype/plugins/publish/integrate_legacy.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 1ddb694f85..8532691e61 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -104,10 +104,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): label = "Integrate Asset" order = pyblish.api.IntegratorOrder - hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", - "hiero", "houdini", "nuke", "photoshop", "resolve", - "standalonepublisher", "traypublisher", "tvpaint", "unreal", - "webpublisher"] families = ["workfile", "pointcache", "camera", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 34e81a3839..b90b61f587 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ 
b/openpype/plugins/publish/integrate_legacy.py @@ -72,10 +72,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): label = "Integrate Asset (legacy)" # Make sure it happens after new integrator order = pyblish.api.IntegratorOrder + 0.00001 - hosts = ["aftereffects", "blender", "celaction", "flame", "harmony", - "hiero", "houdini", "nuke", "photoshop", "resolve", - "standalonepublisher", "traypublisher", "tvpaint", "unreal", - "webpublisher"] families = ["workfile", "pointcache", "camera", From 5c0f0f260365423128e410712c18c1938e83777b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 22 Jul 2022 18:22:48 +0200 Subject: [PATCH 493/785] :heavy_minus_sign: remove invalid submodules --- vendor/powershell/BurntToast | 1 - vendor/powershell/PSWriteColor | 1 - 2 files changed, 2 deletions(-) delete mode 160000 vendor/powershell/BurntToast delete mode 160000 vendor/powershell/PSWriteColor diff --git a/vendor/powershell/BurntToast b/vendor/powershell/BurntToast deleted file mode 160000 index ae0acdd870..0000000000 --- a/vendor/powershell/BurntToast +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ae0acdd870a2fd8d9f0d147de22dc36d6c5e399e diff --git a/vendor/powershell/PSWriteColor b/vendor/powershell/PSWriteColor deleted file mode 160000 index 12eda384eb..0000000000 --- a/vendor/powershell/PSWriteColor +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 12eda384ebd7a7954e15855e312215c009c97114 From e69d8e3ac65ee273e7d9a23c4bbd5f741baf01c9 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 23 Jul 2022 03:53:23 +0000 Subject: [PATCH 494/785] [Automated] Bump version --- CHANGELOG.md | 35 ++++++++++++++++++----------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 20 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8da885473..ec880b9c61 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,17 @@ # Changelog -## [3.12.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.2-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) +### 📖 Documentation + +- Update website with more studios [\#3554](https://github.com/pypeclub/OpenPype/pull/3554) +- Documentation: Update publishing dev docs [\#3549](https://github.com/pypeclub/OpenPype/pull/3549) + **🚀 Enhancements** +- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) @@ -20,8 +26,15 @@ **🐛 Bug fixes** +- Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) +- General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) +- Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) +- Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) +- Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) +- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) - Additional fixes for powershell scripts 
[\#3525](https://github.com/pypeclub/OpenPype/pull/3525) - Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) +- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) @@ -31,8 +44,12 @@ **🔀 Refactored code** +- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) - General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) +- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) - TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) @@ -57,7 +74,6 @@ - Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) - Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) - Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) -- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) **🐛 Bug fixes** @@ -95,34 +111,19 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) -### 📖 Documentation - -- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) -- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) - **🚀 Enhancements** - Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) -- Attribute Defs UI: Files widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) **🐛 Bug fixes** - NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) - Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) -- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) -- Houdini: fix loading and updating vbd/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) -- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) -- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) -- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) **🔀 Refactored code** - Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) - General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) -- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) -- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) -- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) -- Nuke: Use client query functions 
[\#3391](https://github.com/pypeclub/OpenPype/pull/3391) ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) diff --git a/openpype/version.py b/openpype/version.py index dd5ad97449..9dda1eacce 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2-nightly.2" +__version__ = "3.12.2-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index 9552242694..eebc8a5600 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2-nightly.2" # OpenPype +version = "3.12.2-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 5e31967310836d0ef1ad3a1590fa1f3c7d9c682d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 10:53:07 +0200 Subject: [PATCH 495/785] implemented helper function to escape html symbols --- openpype/tools/utils/lib.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index ea1362945f..2169cf8ef1 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -37,6 +37,19 @@ def center_window(window): window.move(geo.topLeft()) +def html_escape(text): + """Basic escape of html syntax symbols in text.""" + + return ( + text + .replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace('"', """) + .replace("'", "'") + ) + + def set_style_property(widget, property_name, property_value): """Set widget's property that may affect style. From 307612d86878f3d098cc3d5c90f9d3464fb87b5f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 10:53:24 +0200 Subject: [PATCH 496/785] replace implemented functions instead of 'html' --- openpype/tools/publisher/publish_report_viewer/model.py | 4 ++-- openpype/tools/publisher/widgets/card_view_widgets.py | 4 ++-- openpype/tools/publisher/widgets/list_view_widgets.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/model.py b/openpype/tools/publisher/publish_report_viewer/model.py index bd03376c55..704feeb4bd 100644 --- a/openpype/tools/publisher/publish_report_viewer/model.py +++ b/openpype/tools/publisher/publish_report_viewer/model.py @@ -1,9 +1,9 @@ import uuid -import html from Qt import QtCore, QtGui import pyblish.api +from openpype.tools.utils.lib import html_escape from .constants import ( ITEM_ID_ROLE, ITEM_IS_GROUP_ROLE, @@ -46,7 +46,7 @@ class InstancesModel(QtGui.QStandardItemModel): all_removed = True for instance_item in instance_items: item = QtGui.QStandardItem(instance_item.label) - instance_label = html.escape(instance_item.label) + instance_label = html_escape(instance_item.label) item.setData(instance_label, ITEM_LABEL_ROLE) item.setData(instance_item.errored, ITEM_ERRORED_ROLE) item.setData(instance_item.id, ITEM_ID_ROLE) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index bd591138f4..fa391f4ba0 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -22,13 +22,13 @@ Only one item can be selected at a time. 
import re import collections -import html from Qt import QtWidgets, QtCore from openpype.widgets.nice_checkbox import NiceCheckbox from openpype.tools.utils import BaseClickableFrame +from openpype.tools.utils.lib import html_escape from .widgets import ( AbstractInstanceView, ContextWarningLabel, @@ -308,7 +308,7 @@ class InstanceCardWidget(CardWidget): self._last_variant = variant self._last_subset_name = subset_name # Make `variant` bold - label = html.escape(self.instance.label) + label = html_escape(self.instance.label) found_parts = set(re.findall(variant, label, re.IGNORECASE)) if found_parts: for part in found_parts: diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 3e4fd5b72d..6e31ba635b 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -23,12 +23,12 @@ selection can be enabled disabled using checkbox or keyboard key presses: ``` """ import collections -import html from Qt import QtWidgets, QtCore, QtGui from openpype.style import get_objected_colors from openpype.widgets.nice_checkbox import NiceCheckbox +from openpype.tools.utils.lib import html_escape from .widgets import AbstractInstanceView from ..constants import ( INSTANCE_ID_ROLE, @@ -114,7 +114,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): self.instance = instance - instance_label = html.escape(instance.label) + instance_label = html_escape(instance.label) subset_name_label = QtWidgets.QLabel(instance_label, self) subset_name_label.setObjectName("ListViewSubsetName") @@ -181,7 +181,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): # Check subset name label = self.instance.label if label != self._instance_label_widget.text(): - self._instance_label_widget.setText(html.escape(label)) + self._instance_label_widget.setText(html_escape(label)) # Check active state self.set_active(self.instance["active"]) # Check valid states From 7905d18e6713b1307c2ea33fc3f4cb07577999b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:40:03 +0200 Subject: [PATCH 497/785] check if instance have representations as first thing --- openpype/plugins/publish/extract_thumbnail.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 7933595b89..789b6c75bc 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -29,7 +29,17 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): ffmpeg_args = None def process(self, instance): - self.log.info("subset {}".format(instance.data['subset'])) + subset_name = instance.data["subset"] + instance_repres = instance.data.get("representations") + if not instance_repres: + self.log.debug(( + "Instance {} does not have representations. Skipping" + ).format(subset_name)) + return + + self.log.info( + "Processing instance with subset name {}".format(subset_name) + ) # skip crypto passes. 
# TODO: This is just a quick fix and has its own side-effects - it is From 89705a69d1df6ba69792290d56220e2f2bd317f7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:43:10 +0200 Subject: [PATCH 498/785] move instance review key check earlier and move the logic to method --- openpype/plugins/publish/extract_thumbnail.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 789b6c75bc..839618bcdd 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -41,6 +41,11 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "Processing instance with subset name {}".format(subset_name) ) + # Skip if instance does not have review + if not self._is_review_instance(instance): + self.log.info("Skipping - no review set on instance.") + return + # skip crypto passes. # TODO: This is just a quick fix and has its own side-effects - it is # affecting every subset name with `crypto` in its name. @@ -51,11 +56,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): self.log.info("Skipping crypto passes.") return - # Skip if review not set. - if not instance.data.get("review", True): - self.log.info("Skipping - no review set on instance.") - return - if self._already_has_thumbnail(instance): self.log.info("Thumbnail representation already present.") return @@ -116,6 +116,13 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # There is no need to create more then one thumbnail break + def _is_review_instance(self, instance): + # TODO: We should probably handle "not creating" of thumbnail + # other way then checking for "review" key on instance data? + if instance.data.get("review", True): + return True + return False + def _already_has_thumbnail(self, instance): for repre in instance.data.get("representations", []): self.log.info("repre {}".format(repre)) From f82c97dc6aae62007de239c30fe8304b561a2a3b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:45:44 +0200 Subject: [PATCH 499/785] check existing thumbnail before crypto check --- openpype/plugins/publish/extract_thumbnail.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 839618bcdd..51624a3cc7 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -41,11 +41,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "Processing instance with subset name {}".format(subset_name) ) - # Skip if instance does not have review + # Skip if instance have 'review' key in data set to 'False' if not self._is_review_instance(instance): self.log.info("Skipping - no review set on instance.") return + # Check if already has thumbnail created + if self._already_has_thumbnail(instance_repres): + self.log.info("Thumbnail representation already present.") + return + # skip crypto passes. # TODO: This is just a quick fix and has its own side-effects - it is # affecting every subset name with `crypto` in its name. 
@@ -56,9 +61,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): self.log.info("Skipping crypto passes.") return - if self._already_has_thumbnail(instance): - self.log.info("Thumbnail representation already present.") - return filtered_repres = self._get_filtered_repres(instance) for repre in filtered_repres: @@ -123,12 +125,11 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): return True return False - def _already_has_thumbnail(self, instance): - for repre in instance.data.get("representations", []): + def _already_has_thumbnail(self, repres): + for repre in repres: self.log.info("repre {}".format(repre)) if repre["name"] == "thumbnail": return True - return False def _get_filtered_repres(self, instance): From 239414ffff9d9ec0e03c6b5239e7206317d5b9fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:49:52 +0200 Subject: [PATCH 500/785] try to create thumbnail from all filtered representations --- openpype/plugins/publish/extract_thumbnail.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 51624a3cc7..cb1af12586 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -63,6 +63,14 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): filtered_repres = self._get_filtered_repres(instance) + if not filtered_repres: + self.log.info(( + "Instance don't have representations" + " that can be used as source for thumbnail. Skipping" + )) + return + + thumbnail_created = False for repre in filtered_repres: repre_files = repre["files"] if not isinstance(repre_files, (list, tuple)): @@ -81,7 +89,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): jpeg_file = filename + "jpg" full_output_path = os.path.join(stagingdir, jpeg_file) - thumbnail_created = False # Try to use FFMPEG if OIIO is not supported (for cases when # oiiotool isn't available) if not is_oiio_supported(): @@ -96,10 +103,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): self.log.info("Converting with FFMPEG because input can't be read by OIIO.") # noqa thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path) # noqa - # Skip the rest of the process if the thumbnail wasn't created + # Skip representation and try next one if wasn't created if not thumbnail_created: - self.log.warning("Thumbanil has not been created.") - return + continue new_repre = { "name": "thumbnail", @@ -118,6 +124,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # There is no need to create more then one thumbnail break + if not thumbnail_created: + self.log.warning("Thumbanil has not been created.") + def _is_review_instance(self, instance): # TODO: We should probably handle "not creating" of thumbnail # other way then checking for "review" key on instance data? 
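Taken together, the extract-thumbnail commits above converge on one fallback flow in ExtractThumbnail.process: return early when the instance has no representations, no review, an existing thumbnail, or is a crypto pass; then walk the filtered representations and stop at the first successful conversion, preferring OIIO when available and falling back to ffmpeg. A condensed, standalone sketch of that loop (the converter callables stand in for the plugin's OIIO and ffmpeg helpers):

    def create_first_thumbnail(representations, converters):
        """Try each representation with each converter until one succeeds."""
        for repre in representations:
            files = repre["files"]
            if not isinstance(files, (list, tuple)):
                files = [files]
            # The middle file of a sequence is used as the thumbnail source.
            src_file = files[len(files) // 2]
            for convert in converters:  # e.g. (convert_with_oiio, convert_with_ffmpeg)
                if convert(src_file):
                    return True
        return False
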
From 6a018364b2961d21dab66657b12613838ba54750 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:53:27 +0200 Subject: [PATCH 501/785] create custom staging dir for thumbnail representation --- openpype/plugins/publish/extract_thumbnail.py | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index cb1af12586..2715aa4db4 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -1,4 +1,5 @@ import os +import tempfile import pyblish.api from openpype.lib import ( @@ -8,8 +9,6 @@ from openpype.lib import ( run_subprocess, path_to_subprocess_arg, - - execute, ) @@ -57,11 +56,10 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # This must be solved properly, maybe using tags on # representation that can be determined much earlier and # with better precision. - if 'crypto' in instance.data['subset'].lower(): + if "crypto" in subset_name.lower(): self.log.info("Skipping crypto passes.") return - filtered_repres = self._get_filtered_repres(instance) if not filtered_repres: self.log.info(( @@ -70,6 +68,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): )) return + # Create temp directory for thumbnail + # - this is to avoid "override" of source file + dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") + self.log.debug( + "Create temp directory {} for thumbnail".formap(dst_staging) + ) + # Store new staging to cleanup paths + instance.context.data["cleanupFullPaths"].append(dst_staging) + thumbnail_created = False for repre in filtered_repres: repre_files = repre["files"] @@ -79,15 +86,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): file_index = int(float(len(repre_files)) * 0.5) input_file = repre_files[file_index] - stagingdir = os.path.normpath(repre["stagingDir"]) - - full_input_path = os.path.join(stagingdir, input_file) + src_staging = os.path.normpath(repre["stagingDir"]) + full_input_path = os.path.join(src_staging, input_file) self.log.info("input {}".format(full_input_path)) filename = os.path.splitext(input_file)[0] - if not filename.endswith('.'): - filename += "." 
- jpeg_file = filename + "jpg" - full_output_path = os.path.join(stagingdir, jpeg_file) + jpeg_file = filename + ".jpg" + full_output_path = os.path.join(dst_staging, jpeg_file) # Try to use FFMPEG if OIIO is not supported (for cases when # oiiotool isn't available) @@ -111,7 +115,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "name": "thumbnail", "ext": "jpg", "files": jpeg_file, - "stagingDir": stagingdir, + "stagingDir": dst_staging, "thumbnail": True, "tags": ["thumbnail"] } From d1987eed02ba4ca842cd820ef0947adbb4240d2b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:53:58 +0200 Subject: [PATCH 502/785] removed unneeded f string --- openpype/plugins/publish/extract_thumbnail.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 2715aa4db4..d944c341e5 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -167,12 +167,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): def create_thumbnail_oiio(self, src_path, dst_path): self.log.info("outputting {}".format(dst_path)) oiio_tool_path = get_oiio_tools_path() - oiio_cmd = [oiio_tool_path, "-a", - src_path, "-o", - dst_path - ] - subprocess_exr = " ".join(oiio_cmd) - self.log.info(f"running: {subprocess_exr}") + oiio_cmd = [ + oiio_tool_path, + "-a", src_path, + "-o", dst_path + ] + self.log.info("running: {}".format(" ".join(oiio_cmd))) try: run_subprocess(oiio_cmd, logger=self.log) return True From adc9b9303ba090dd1125f83b5d619306d230c5b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 12:55:29 +0200 Subject: [PATCH 503/785] reduced order of thumbnail creation to 2 conditions --- openpype/plugins/publish/extract_thumbnail.py | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index d944c341e5..c1eee71376 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -78,6 +78,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): instance.context.data["cleanupFullPaths"].append(dst_staging) thumbnail_created = False + oiio_supported = is_oiio_supported() for repre in filtered_repres: repre_files = repre["files"] if not isinstance(repre_files, (list, tuple)): @@ -93,19 +94,26 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): jpeg_file = filename + ".jpg" full_output_path = os.path.join(dst_staging, jpeg_file) - # Try to use FFMPEG if OIIO is not supported (for cases when - # oiiotool isn't available) - if not is_oiio_supported(): - thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path) # noqa - else: + if oiio_supported: + self.log.info("Trying to convert with OIIO") # If the input can read by OIIO then use OIIO method for # conversion otherwise use ffmpeg - self.log.info("Trying to convert with OIIO") # noqa - thumbnail_created = self.create_thumbnail_oiio(full_input_path, full_output_path) # noqa + thumbnail_created = self.create_thumbnail_oiio( + full_input_path, full_output_path + ) - if not thumbnail_created: - self.log.info("Converting with FFMPEG because input can't be read by OIIO.") # noqa - thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path) # noqa + # Try to use FFMPEG if OIIO is not supported or for cases when + # oiiotool isn't available + if 
not thumbnail_created: + if oiio_supported: + self.log.info(( + "Converting with FFMPEG because input" + " can't be read by OIIO." + )) + + thumbnail_created = self.create_thumbnail_ffmpeg( + full_input_path, full_output_path + ) # Skip representation and try next one if wasn't created if not thumbnail_created: From a96bfc45ad05c14267ef7d6cd968ab412cf00172 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 25 Jul 2022 13:49:15 +0200 Subject: [PATCH 504/785] Fix - method expects dict not id --- openpype/hosts/harmony/plugins/load/load_background.py | 2 +- openpype/hosts/harmony/plugins/load/load_imagesequence.py | 2 +- openpype/hosts/harmony/plugins/load/load_template.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/harmony/plugins/load/load_background.py b/openpype/hosts/harmony/plugins/load/load_background.py index 9e9fcbfa32..c28a87791e 100644 --- a/openpype/hosts/harmony/plugins/load/load_background.py +++ b/openpype/hosts/harmony/plugins/load/load_background.py @@ -300,7 +300,7 @@ class BackgroundLoader(load.LoaderPlugin): print(container) - is_latest = is_representation_from_latest(representation["parent"]) + is_latest = is_representation_from_latest(representation) for layer in sorted(layers): file_to_import = [ os.path.join(bg_folder, layer).replace("\\", "/") diff --git a/openpype/hosts/harmony/plugins/load/load_imagesequence.py b/openpype/hosts/harmony/plugins/load/load_imagesequence.py index 8d6421a6aa..1b64aff595 100644 --- a/openpype/hosts/harmony/plugins/load/load_imagesequence.py +++ b/openpype/hosts/harmony/plugins/load/load_imagesequence.py @@ -109,7 +109,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) # Colour node. - if is_representation_from_latest(representation["parent"]): + if is_representation_from_latest(representation): harmony.send( { "function": "PypeHarmony.setColor", diff --git a/openpype/hosts/harmony/plugins/load/load_template.py b/openpype/hosts/harmony/plugins/load/load_template.py index 8ddd3934f7..f3c69a9104 100644 --- a/openpype/hosts/harmony/plugins/load/load_template.py +++ b/openpype/hosts/harmony/plugins/load/load_template.py @@ -83,7 +83,7 @@ class TemplateLoader(load.LoaderPlugin): self_name = self.__class__.__name__ update_and_replace = False - if is_representation_from_latest(representation["parent"]): + if is_representation_from_latest(representation): self._set_green(node) else: self._set_red(node) From d7917b1950c6d5cd79e2b4d297de99d8a9ce2c82 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 25 Jul 2022 15:30:03 +0300 Subject: [PATCH 505/785] Handle locked attributes for playblast capture. --- openpype/vendor/python/common/capture.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 71b86a5f1a..86c1c60e56 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -665,7 +665,10 @@ def _applied_camera_options(options, panel): _iteritems = getattr(options, "iteritems", options.items) for opt, value in _iteritems(): - _safe_setAttr(camera + "." + opt, value) + if cmds.getAttr(camera + "." + opt, lock=True): + continue + else: + _safe_setAttr(camera + "." + opt, value) try: yield @@ -673,7 +676,11 @@ def _applied_camera_options(options, panel): if old_options: _iteritems = getattr(old_options, "iteritems", old_options.items) for opt, value in _iteritems(): - _safe_setAttr(camera + "." + opt, value) + # + if cmds.getAttr(camera + "." 
+ opt, lock=True): + continue + else: + _safe_setAttr(camera + "." + opt, value) @contextlib.contextmanager From 649ddf19c9ca49b5d0838045626a47c41ac17767 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 14:40:53 +0200 Subject: [PATCH 506/785] query representation using query function --- openpype/plugins/publish/integrate.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8532691e61..597ed9844e 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -10,6 +10,9 @@ from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api import openpype.api +from openpype.client import ( + get_representations, +) from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io @@ -274,6 +277,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return filtered_repres def register(self, instance, file_transactions, filtered_repres): + project_name = legacy_io.active_project() + instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: self.log.info(( @@ -295,13 +300,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Get existing representations (if any) existing_repres_by_name = { - repres["name"].lower(): repres for repres in legacy_io.find( - { - "parent": version["_id"], - "type": "representation" - }, - # Only care about id and name of existing representations - projection={"_id": True, "name": True} + repre_doc["name"].lower(): repre_doc + for repre_doc in get_representations( + project_name, + version_ids=version["_id"], + fields=["_id", "name"] ) } From e7c937bdc086e214fc05a5344b516257ee11751a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 14:42:02 +0200 Subject: [PATCH 507/785] use query function to query subset document --- openpype/plugins/publish/integrate.py | 31 +++++++++++++-------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 597ed9844e..5ac5680cfa 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -12,6 +12,7 @@ import pyblish.api import openpype.api from openpype.client import ( get_representations, + get_subset_by_name, ) from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction @@ -294,7 +295,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) - subset, subset_writes = self.prepare_subset(instance) + subset, subset_writes = self.prepare_subset(instance, project_name) version, version_writes = self.prepare_version(instance, subset) instance.data["versionEntity"] = version @@ -429,17 +430,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info("Registered {} representations" "".format(len(prepared_representations))) - def prepare_subset(self, instance): - asset = instance.data.get("assetEntity") + def prepare_subset(self, instance, project_name): + asset_doc = instance.data.get("assetEntity") subset_name = instance.data["subset"] self.log.debug("Subset: {}".format(subset_name)) # Get existing subset if it exists - subset = legacy_io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": subset_name - }) + subset_doc = get_subset_by_name( + project_name, 
subset_name, asset_doc["_id"] + ) # Define subset data data = { @@ -451,33 +450,33 @@ class IntegrateAsset(pyblish.api.InstancePlugin): data["subsetGroup"] = subset_group bulk_writes = [] - if subset is None: + if subset_doc is None: # Create a new subset self.log.info("Subset '%s' not found, creating ..." % subset_name) - subset = { + subset_doc = { "_id": ObjectId(), "schema": "openpype:subset-3.0", "type": "subset", "name": subset_name, "data": data, - "parent": asset["_id"] + "parent": asset_doc["_id"] } - bulk_writes.append(InsertOne(subset)) + bulk_writes.append(InsertOne(subset_doc)) else: # Update existing subset data with new data and set in database. # We also change the found subset in-place so we don't need to # re-query the subset afterwards - subset["data"].update(data) + subset_doc["data"].update(data) bulk_writes.append(UpdateOne( - {"type": "subset", "_id": subset["_id"]}, + {"type": "subset", "_id": subset_doc["_id"]}, {"$set": { - "data": subset["data"] + "data": subset_doc["data"] }} )) self.log.info("Prepared subset: {}".format(subset_name)) - return subset, bulk_writes + return subset_doc, bulk_writes def prepare_version(self, instance, subset): From 1a5258b2fe7d0dd2ec642b3648ba2e17707105b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 14:43:08 +0200 Subject: [PATCH 508/785] use query function to get version document --- openpype/plugins/publish/integrate.py | 31 ++++++++++++++------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 5ac5680cfa..6236724228 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -13,6 +13,7 @@ import openpype.api from openpype.client import ( get_representations, get_subset_by_name, + get_version_by_name, ) from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction @@ -478,40 +479,40 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info("Prepared subset: {}".format(subset_name)) return subset_doc, bulk_writes - def prepare_version(self, instance, subset): - + def prepare_version(self, instance, subset_doc, project_name): version_number = instance.data["version"] - version = { + version_doc = { "schema": "openpype:version-3.0", "type": "version", - "parent": subset["_id"], + "parent": subset_doc["_id"], "name": version_number, "data": self.create_version_data(instance) } - existing_version = legacy_io.find_one({ - 'type': 'version', - 'parent': subset["_id"], - 'name': version_number - }, projection={"_id": True}) + existing_version = get_version_by_name( + project_name, + version_number, + subset_doc["_id"], + fields=["_id"] + ) if existing_version: self.log.debug("Updating existing version ...") - version["_id"] = existing_version["_id"] + version_doc["_id"] = existing_version["_id"] else: self.log.debug("Creating new version ...") - version["_id"] = ObjectId() + version_doc["_id"] = ObjectId() bulk_writes = [ReplaceOne( - filter={"_id": version["_id"]}, - replacement=version, + filter={"_id": version_doc["_id"]}, + replacement=version_doc, upsert=True )] - self.log.info("Prepared version: v{0:03d}".format(version["name"])) + self.log.info("Prepared version: v{0:03d}".format(version_doc["name"])) - return version, bulk_writes + return version_doc, bulk_writes def prepare_representation(self, repre, template_name, From e8bfbf4292979f54ac82114b967394392eefa0a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis 
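[Editorial sketch] Patches 506-508 apply one recurring substitution: raw legacy_io.find / find_one calls in integrate.py are replaced by the typed query helpers from openpype.client. A minimal sketch of how those helpers compose, using the same signatures shown in the diffs; names such as asset_doc, subset_name and version_number are assumed to come from the publish instance, and this block is illustrative only, not part of any patch:

from openpype.client import (
    get_subset_by_name,
    get_version_by_name,
    get_representations,
)


def find_existing_docs(project_name, asset_doc, subset_name, version_number):
    """Resolve already-published subset/version/representation documents."""
    subset_doc = get_subset_by_name(project_name, subset_name, asset_doc["_id"])
    if not subset_doc:
        # Nothing was published for this subset yet.
        return None, None, []

    version_doc = get_version_by_name(
        project_name, version_number, subset_doc["_id"], fields=["_id"]
    )
    repre_docs = []
    if version_doc:
        # Only id and name are needed to detect existing representations.
        repre_docs = list(get_representations(
            project_name,
            version_ids=[version_doc["_id"]],
            fields=["_id", "name"],
        ))
    return subset_doc, version_doc, repre_docs
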
Date: Mon, 25 Jul 2022 16:44:34 +0200 Subject: [PATCH 509/785] Added validator for old containers for AfterEffects --- openpype/plugins/publish/validate_containers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/validate_containers.py b/openpype/plugins/publish/validate_containers.py index 7732ec5ea9..b2a3ed9b79 100644 --- a/openpype/plugins/publish/validate_containers.py +++ b/openpype/plugins/publish/validate_containers.py @@ -19,7 +19,7 @@ class ValidateContainers(pyblish.api.ContextPlugin): label = "Validate Containers" order = pyblish.api.ValidatorOrder - hosts = ["maya", "houdini", "nuke", "harmony", "photoshop"] + hosts = ["maya", "houdini", "nuke", "harmony", "photoshop", "aftereffects"] optional = True actions = [ShowInventory] From 441009fe5f8dbd5422b47624a23ef06ccac2af7d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 17:18:02 +0200 Subject: [PATCH 510/785] general: removing exclude family filtering from integrator --- openpype/plugins/publish/integrate.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8532691e61..06909f0ec3 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -156,7 +156,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "mvUsdOverride", "simpleUnrealTexture" ] - exclude_families = ["clip", "render.farm"] + default_template_name = "publish" # Representation context keys that should always be written to @@ -190,14 +190,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ).format(instance.data["family"])) return - # Exclude instances that also contain families from exclude families - families = set(get_instance_families(instance)) - exclude = families & set(self.exclude_families) - if exclude: - self.log.debug("Instance not integrated due to exclude " - "families found: {}".format(", ".join(exclude))) - return - file_transactions = FileTransaction(log=self.log) try: self.register(instance, file_transactions, filtered_repres) From 66d283ecdf0ac881624929fc76cbe48e1398b2d3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 17:34:05 +0200 Subject: [PATCH 511/785] nuke: add `farm` key to instance data if render.farm --- openpype/hosts/nuke/plugins/publish/precollect_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 4b3b70fa12..b0da94c4ce 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -94,6 +94,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # Farm rendering self.log.info("flagged for farm render") instance.data["transfer"] = False + instance.data["farm"] = True families.append("{}.farm".format(family)) family = families_ak.lower() From 53e430ee50eaa9d33157573342c2ffb4020121b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 25 Jul 2022 17:49:18 +0200 Subject: [PATCH 512/785] :bug: fix active pane loss --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 2 ++ openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 233a0b60c2..54ef09e060 100644 --- 
a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -128,8 +128,10 @@ class ExtractPlayblast(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: + panel = cmds.getPanel(with_focus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) + cmds.setFocus(panel) path = capture.capture(**preset) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 4f28aa167c..d1f43b61be 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -110,8 +110,10 @@ class ExtractThumbnail(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: + panel = cmds.getPanel(with_focus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) + cmds.setFocus(panel) path = capture.capture(**preset) playblast = self._fix_playblast_output_path(path) From ede691c3e90244fa46a388ad4280bf22b0c50d31 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:20:57 +0200 Subject: [PATCH 513/785] fix missing arg --- openpype/plugins/publish/integrate.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6236724228..0193d136c2 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -297,7 +297,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) subset, subset_writes = self.prepare_subset(instance, project_name) - version, version_writes = self.prepare_version(instance, subset) + version, version_writes = self.prepare_version( + instance, subset, project_name + ) instance.data["versionEntity"] = version # Get existing representations (if any) From 6ae84ca5e6dec80990d13950d4226a7e66a99e66 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:23:41 +0200 Subject: [PATCH 514/785] fix passed argument to get_representations --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0193d136c2..8048ce3ab9 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -307,7 +307,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): repre_doc["name"].lower(): repre_doc for repre_doc in get_representations( project_name, - version_ids=version["_id"], + version_ids=[version["_id"]], fields=["_id", "name"] ) } From d4f96ae720c258c7ec6895d5398ee2a0c3e96812 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:30:23 +0200 Subject: [PATCH 515/785] change order of some collectors --- openpype/plugins/publish/collect_datetime_data.py | 2 +- openpype/plugins/publish/collect_machine_name.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_datetime_data.py b/openpype/plugins/publish/collect_datetime_data.py index 1675ae1a98..0d21490d8d 100644 --- a/openpype/plugins/publish/collect_datetime_data.py +++ b/openpype/plugins/publish/collect_datetime_data.py @@ -9,7 +9,7 @@ from openpype.api import config class CollectDateTimeData(pyblish.api.ContextPlugin): - 
order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 label = "Collect DateTime data" def process(self, context): diff --git a/openpype/plugins/publish/collect_machine_name.py b/openpype/plugins/publish/collect_machine_name.py index 72ef68f8ed..8c25966031 100644 --- a/openpype/plugins/publish/collect_machine_name.py +++ b/openpype/plugins/publish/collect_machine_name.py @@ -11,7 +11,7 @@ import pyblish.api class CollectMachineName(pyblish.api.ContextPlugin): label = "Local Machine Name" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 hosts = ["*"] def process(self, context): From 0b88bc1fcd689d8096fe294e48951b2663d49aa9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:31:50 +0200 Subject: [PATCH 516/785] added collector to stored current context into publish context data --- .../publish/collect_current_context.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 openpype/plugins/publish/collect_current_context.py diff --git a/openpype/plugins/publish/collect_current_context.py b/openpype/plugins/publish/collect_current_context.py new file mode 100644 index 0000000000..ebcbc6a4aa --- /dev/null +++ b/openpype/plugins/publish/collect_current_context.py @@ -0,0 +1,44 @@ +""" +Provides: + context -> projectName (str) + context -> asset (str) + context -> task (str) +""" + +import pyblish.api +from openpype.pipeline import legacy_io + + +class CollectCurrentContext(pyblish.api.ContextPlugin): + """Collect project context into publish context data. + + Plugin does not override any value if is already set. + """ + + order = pyblish.api.CollectorOrder - 0.5 + label = "Collect Current context" + + def process(self, context): + # Set project name in context data + project_name = context.data.get("projectName") + asset_name = context.data.get("asset") + task_name = context.data.get("task") + if not project_name: + project_name = legacy_io.current_project() + context.data["projectName"] = project_name + + if not asset_name: + asset_name = legacy_io.Session.get("AVALON_ASSET") + context.data["asset"] = asset_name + + if not task_name: + task_name = legacy_io.Session.get("AVALON_TASK") + context.data["task"] = task_name + + # QUESTION should we be explicit with keys? 
(the same on instances) + # - 'asset' -> 'assetName' + # - 'task' -> 'taskName' + + self.log.info(( + "Collected project context\nProject: {}\nAsset: {}\nTask: {}" + ).format(project_name, asset_name, task_name)) From 477acd1d5ef55d71117d89b467831347b449989e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:32:05 +0200 Subject: [PATCH 517/785] create context plugin makes sure that project name is set --- openpype/plugins/publish/collect_from_create_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index d2be633cbe..78bd821bfb 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -19,6 +19,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if not create_context: return + project_name = create_context.project_name + if project_name: + context.data["projectName"] = project_name for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: From 9ce6ea6f363eb24ef79c730a671c119b18ee92c3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:57:23 +0200 Subject: [PATCH 518/785] make sure legacy io is installed --- openpype/plugins/publish/collect_current_context.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_current_context.py b/openpype/plugins/publish/collect_current_context.py index ebcbc6a4aa..7e42700d7d 100644 --- a/openpype/plugins/publish/collect_current_context.py +++ b/openpype/plugins/publish/collect_current_context.py @@ -19,7 +19,10 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): label = "Collect Current context" def process(self, context): - # Set project name in context data + # Make sure 'legacy_io' is intalled + legacy_io.install() + + # Check if values are already set project_name = context.data.get("projectName") asset_name = context.data.get("asset") task_name = context.data.get("task") From d585ae526cf1d9306091f242c039e2efa5b29d00 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 19:20:03 +0200 Subject: [PATCH 519/785] get project name from 'context.data["projectName"]' or 'anatomy.project_name' at obvious places --- .../submit_maya_remote_publish_deadline.py | 12 +++++------- .../plugins/publish/collect_anatomy_object.py | 11 +++++++---- .../plugins/publish/collect_avalon_entities.py | 12 +++++++----- openpype/plugins/publish/collect_hierarchy.py | 4 +--- .../plugins/publish/collect_rendered_files.py | 16 +++++----------- .../plugins/publish/collect_resources_path.py | 6 +----- .../plugins/publish/integrate_hero_version.py | 6 ++---- openpype/plugins/publish/integrate_thumbnail.py | 3 +-- 8 files changed, 29 insertions(+), 41 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 57572fcb24..6e53099162 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -5,7 +5,6 @@ from maya import cmds from openpype.pipeline import legacy_io, PublishXmlValidationError from openpype.settings import get_project_settings -import openpype.api import pyblish.api @@ -34,7 +33,9 @@ class 
MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): targets = ["local"] def process(self, instance): - settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_name = instance.context.data["projectName"] + # TODO settings can be received from 'context.data["project_settings"]' + settings = get_project_settings(project_name) # use setting for publish job on farm, no reason to have it separately deadline_publish_job_sett = (settings["deadline"] ["publish"] @@ -53,9 +54,6 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): scene = instance.context.data["currentFile"] scenename = os.path.basename(scene) - # Get project code - project_name = legacy_io.Session["AVALON_PROJECT"] - job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) @@ -107,8 +105,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO replace legacy_io with context.data ? - environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] + # TODO replace legacy_io with context.data + environment["AVALON_PROJECT"] = project_name environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") diff --git a/openpype/plugins/publish/collect_anatomy_object.py b/openpype/plugins/publish/collect_anatomy_object.py index b1415098b6..8128221925 100644 --- a/openpype/plugins/publish/collect_anatomy_object.py +++ b/openpype/plugins/publish/collect_anatomy_object.py @@ -1,24 +1,27 @@ """Collect Anatomy object. Requires: - os.environ -> AVALON_PROJECT + context -> projectName Provides: context -> anatomy (openpype.pipeline.anatomy.Anatomy) """ -import os + import pyblish.api from openpype.pipeline import Anatomy class CollectAnatomyObject(pyblish.api.ContextPlugin): - """Collect Anatomy object into Context""" + """Collect Anatomy object into Context. + + Order offset could be changed to '-0.45'. + """ order = pyblish.api.CollectorOrder - 0.4 label = "Collect Anatomy Object" def process(self, context): - project_name = os.environ.get("AVALON_PROJECT") + project_name = context.data.get("projectName") if project_name is None: raise AssertionError( "Environment `AVALON_PROJECT` is not set." diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index 6cd0d136e8..0a7afc086f 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -1,11 +1,13 @@ """Collect Anatomy and global anatomy data. Requires: - session -> AVALON_PROJECT, AVALON_ASSET + session -> AVALON_ASSET + context -> projectName Provides: - context -> projectEntity - project entity from database - context -> assetEntity - asset entity from database + context -> projectEntity - Project document from database. + context -> assetEntity - Asset document from database only if 'asset' is + set in context. 
""" import pyblish.api @@ -15,14 +17,14 @@ from openpype.pipeline import legacy_io class CollectAvalonEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context""" + """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Avalon Entities" def process(self, context): legacy_io.install() - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 91d5162d62..687397be8a 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -1,7 +1,5 @@ import pyblish.api -from openpype.pipeline import legacy_io - class CollectHierarchy(pyblish.api.ContextPlugin): """Collecting hierarchy from `parents`. @@ -20,7 +18,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): def process(self, context): temp_context = {} - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] final_context = {} final_context[project_name] = {} final_context[project_name]['entity_type'] = 'Project' diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 670e57ed10..8c5d591148 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -1,7 +1,7 @@ """Loads publishing context from json and continues in publish process. Requires: - anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.4) Provides: context, instances -> All data from previous publishing process. @@ -21,6 +21,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. """ + order = pyblish.api.CollectorOrder - 0.2 # Keep "filesequence" for backwards compatibility of older jobs targets = ["filesequence", "farm"] @@ -122,19 +123,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): "Missing `OPENPYPE_PUBLISH_DATA`") paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) - project_name = os.environ.get("AVALON_PROJECT") - if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` was not found." - "Could not set project `root` which may cause issues." 
- ) - - # TODO root filling should happen after collect Anatomy + # Using already collected Anatomy + anatomy = context.data["anatomy"] self.log.info("Getting root setting for project \"{}\"".format( - project_name + anatomy.project_name )) - anatomy = context.data["anatomy"] self.log.info("anatomy: {}".format(anatomy.roots)) try: session_is_set = False diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 8bdf70b529..00f65b8b67 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -13,8 +13,6 @@ import copy import pyblish.api -from openpype.pipeline import legacy_io - class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored""" @@ -58,7 +56,6 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "effect", "staticMesh", "skeletalMesh" - ] def process(self, instance): @@ -86,11 +83,10 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." - ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled["publish"]["path"] # Directory diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 5f97a9bd41..735b7e50fa 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -71,7 +71,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): template_key = self._get_template_key(instance) anatomy = instance.context.data["anatomy"] - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = anatomy.project_name if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -454,7 +454,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - project_name = legacy_io.Session["AVALON_PROJECT"] legacy_io.database[project_name].bulk_write( bulk_writes ) @@ -517,11 +516,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
- ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled[template_key]["path"] # Directory diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index fd50858a91..8ae0dd2d60 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -39,9 +39,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - project_name = legacy_io.Session["AVALON_PROJECT"] - anatomy = instance.context.data["anatomy"] + project_name = anatomy.project_name if "publish" not in anatomy.templates: self.log.warning("Anatomy is missing the \"publish\" key!") return From 2453892f3fe12f1eee9615f94ac5c88ab6414f94 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 19:21:09 +0200 Subject: [PATCH 520/785] raise KnownPublishError instead of AssertionError --- openpype/plugins/publish/collect_anatomy_object.py | 8 ++++---- .../plugins/publish/collect_avalon_entities.py | 9 +++++---- openpype/plugins/publish/collect_rendered_files.py | 14 +++++++++++--- 3 files changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_object.py b/openpype/plugins/publish/collect_anatomy_object.py index 8128221925..725cae2b14 100644 --- a/openpype/plugins/publish/collect_anatomy_object.py +++ b/openpype/plugins/publish/collect_anatomy_object.py @@ -8,7 +8,7 @@ Provides: """ import pyblish.api -from openpype.pipeline import Anatomy +from openpype.pipeline import Anatomy, KnownPublishError class CollectAnatomyObject(pyblish.api.ContextPlugin): @@ -23,10 +23,10 @@ class CollectAnatomyObject(pyblish.api.ContextPlugin): def process(self, context): project_name = context.data.get("projectName") if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` is not set." + raise KnownPublishError(( + "Project name is not set in 'projectName'." "Could not initialize project's Anatomy." - ) + )) context.data["anatomy"] = Anatomy(project_name) diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index 0a7afc086f..3b05b6ae98 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -13,7 +13,7 @@ Provides: import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectAvalonEntities(pyblish.api.ContextPlugin): @@ -29,9 +29,10 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): task_name = legacy_io.Session["AVALON_TASK"] project_entity = get_project(project_name) - assert project_entity, ( - "Project '{0}' was not found." 
- ).format(project_name) + if not project_entity: + raise KnownPublishError( + "Project '{0}' was not found.".format(project_name) + ) self.log.debug("Collected Project \"{}\"".format(project_entity)) context.data["projectEntity"] = project_entity diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 8c5d591148..8f8d0a5eeb 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -12,7 +12,7 @@ import json import pyblish.api -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectRenderedFiles(pyblish.api.ContextPlugin): @@ -20,6 +20,10 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): This collector will try to find json files in provided `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + Note: + We should split this collector and move the part which handle reading + of file and it's context from session data before collect anatomy + and instance creation dependent on anatomy can be done here. """ order = pyblish.api.CollectorOrder - 0.2 @@ -119,8 +123,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): def process(self, context): self._context = context - assert os.environ.get("OPENPYPE_PUBLISH_DATA"), ( - "Missing `OPENPYPE_PUBLISH_DATA`") + if not os.environ.get("OPENPYPE_PUBLISH_DATA"): + raise KnownPublishError("Missing `OPENPYPE_PUBLISH_DATA`") + + # QUESTION + # Do we support (or want support) multiple files in the variable? + # - what if they have different context? paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) # Using already collected Anatomy From d2e1fe84456feda9c3a8432d665715c5408c2d57 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 22:16:06 +0200 Subject: [PATCH 521/785] nuke: fixing local rendering slate workflow --- .../hosts/nuke/plugins/publish/extract_render_local.py | 8 -------- .../hosts/nuke/plugins/publish/extract_slate_frame.py | 1 + 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 1595fe03fb..1b3bf46b71 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -31,10 +31,6 @@ class NukeRenderLocal(openpype.api.Extractor): first_frame = instance.data.get("frameStartHandle", None) - # exception for slate workflow - if "slate" in families: - first_frame -= 1 - last_frame = instance.data.get("frameEndHandle", None) node_subset_name = instance.data.get("name", None) @@ -68,10 +64,6 @@ class NukeRenderLocal(openpype.api.Extractor): int(last_frame) ) - # exception for slate workflow - if "slate" in families: - first_frame += 1 - ext = node["file_type"].value() if "representations" not in instance.data: diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 99ade4cf9b..ccfaf0ed46 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -13,6 +13,7 @@ from openpype.hosts.nuke.api import ( get_view_process_node ) + class ExtractSlateFrame(openpype.api.Extractor): """Extracts movie and thumbnail with baked in luts From 84eb91acb7d4fe41a122543493372b7dea6012a0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 26 Jul 2022 08:34:41 +0800 
Subject: [PATCH 522/785] bugfix/OP-3356_Maya-Review-Image-plane-attribute --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 4f28aa167c..7885c1ebc9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -116,7 +116,11 @@ class ExtractThumbnail(openpype.api.Extractor): path = capture.capture(**preset) playblast = self._fix_playblast_output_path(path) - _, thumbnail = os.path.split(playblast) + image_plane = instance.data.get("imagePlane") + if image_plane: + _, thumbnail = os.path.split(playblast) + else: + return self.log.info("file list {}".format(thumbnail)) From 3755c5bf05d352de26647f05b5c2940d4022c30f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 10:45:35 +0200 Subject: [PATCH 523/785] implemented helper method to get representation path --- .../publish/integrate_ftrack_instances.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index c8d9e4117d..09a8672d77 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -360,6 +360,30 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): )) instance.data["ftrackComponentsList"] = component_list + def _get_repre_path(self, instance, repre, only_published): + published_path = repre.get("published_path") + if published_path: + published_path = os.path.normpath(published_path) + if os.path.exists(published_path): + return published_path + + if only_published: + return None + + comp_files = repre["files"] + if isinstance(comp_files, (tuple, list, set)): + filename = comp_files[0] + else: + filename = comp_files + + staging_dir = repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data["stagingDir"] + src_path = os.path.normpath(os.path.join(staging_dir, filename)) + if os.path.exists(src_path): + return src_path + return None + def _get_asset_version_status_name(self, instance): if not self.asset_versions_status_profiles: return None From 0474456e77c038af1cd1905e4a586cc8a6e27aae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 10:53:06 +0200 Subject: [PATCH 524/785] use helper method to calculate representation path for integration --- .../publish/integrate_ftrack_instances.py | 35 +++++++++---------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 09a8672d77..f1a4f28fd1 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -58,7 +58,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = instance.data["family"].lower() + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") if not asset_type and family_low in self.family_mapping: @@ -140,24 +140,16 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component = None 
first_thumbnail_component_repre = None for repre in thumbnail_representations: - published_path = repre.get("published_path") - if not published_path: - comp_files = repre["files"] - if isinstance(comp_files, (tuple, list, set)): - filename = comp_files[0] - else: - filename = comp_files - - published_path = os.path.join( - repre["stagingDir"], filename + repre_path = self._get_repre_path(instance, repre, False) + if not repre_path: + self.log.warning( + "Published path is not set and source was removed." ) - if not os.path.exists(published_path): - continue - repre["published_path"] = published_path + continue # Create copy of base comp item and append it thumbnail_item = copy.deepcopy(base_component_item) - thumbnail_item["component_path"] = repre["published_path"] + thumbnail_item["component_path"] = repre_path thumbnail_item["component_data"] = { "name": "thumbnail" } @@ -216,6 +208,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): extended_asset_name = "" multiple_reviewable = len(review_representations) > 1 for repre in review_representations: + repre_path = self._get_repre_path(instance, repre, False) + if not repre_path: + self.log.warning( + "Published path is not set and source was removed." + ) + continue + # Create copy of base comp item and append it review_item = copy.deepcopy(base_component_item) @@ -270,7 +269,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): fps = instance_fps # Change location - review_item["component_path"] = repre["published_path"] + review_item["component_path"] = repre_path # Change component data review_item["component_data"] = { # Default component name is "main". @@ -327,7 +326,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add others representations as component for repre in other_representations: - published_path = repre.get("published_path") + published_path = self._get_repre_path(instance, repre, True) if not published_path: continue # Create copy of base comp item and append it @@ -368,7 +367,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): return published_path if only_published: - return None + return published_path comp_files = repre["files"] if isinstance(comp_files, (tuple, list, set)): From 266bce0f48070310f8b44ebfc25ab8b83ba51698 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 10:53:13 +0200 Subject: [PATCH 525/785] reduce duplicated variables --- .../modules/ftrack/plugins/publish/integrate_ftrack_api.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index c4f7b1f05d..58591bacfd 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -26,8 +26,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): families = ["ftrack"] def process(self, instance): - session = instance.context.data["ftrackSession"] - context = instance.context component_list = instance.data.get("ftrackComponentsList") if not component_list: self.log.info( @@ -36,8 +34,8 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) return - session = instance.context.data["ftrackSession"] context = instance.context + session = context.data["ftrackSession"] parent_entity = None default_asset_name = None From 8e5a2a082ee18b46f3223c6212fdd65510dd2bee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 10:57:23 +0200 Subject: [PATCH 
526/785] added docstring to ftrack get repre path method --- .../publish/integrate_ftrack_instances.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index f1a4f28fd1..8eb8479183 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -360,6 +360,26 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): instance.data["ftrackComponentsList"] = component_list def _get_repre_path(self, instance, repre, only_published): + """Get representation path that can be used for integration. + + When 'only_published' is set to true the validation of path is not + relevant. In that case we just need what is set in 'published_path' + as "reference". The reference is not used to get or upload the file but + for reference where the file was published. + + Args: + instance (pyblish.Instance): Processed instance object. Used + for source of staging dir if representation does not have + filled it. + repre (dict): Representation on instance which could be and + could not be integrated with main integrator. + only_published (bool): Care only about published paths and + ignore if filepath is not existing anymore. + + Returns: + str: Path to representation file. + None: Path is not filled or does not exists. + """ published_path = repre.get("published_path") if published_path: published_path = os.path.normpath(published_path) From fcf6e70107cf609c9a561ec2821455100b9faa9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:03:24 +0200 Subject: [PATCH 527/785] add missing empty line --- .../modules/ftrack/plugins/publish/integrate_ftrack_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 8eb8479183..d937e64790 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -380,6 +380,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): str: Path to representation file. None: Path is not filled or does not exists. """ + published_path = repre.get("published_path") if published_path: published_path = os.path.normpath(published_path) From 34601a6243dc39e09ab208a9e7e859a8e84e5d67 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 26 Jul 2022 17:05:39 +0800 Subject: [PATCH 528/785] fix the bug of loading image plane --- .../maya/plugins/publish/extract_thumbnail.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 7885c1ebc9..47e9a907a0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -100,6 +100,13 @@ class ExtractThumbnail(openpype.api.Extractor): # camera. 
if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] + + #Show or Hide Image Plane + image_plane = instance.data.get("imagePlane", True) + if "viewport_options" in preset: + preset["viewport_options"]["imagePlane"] = image_plane + else: + preset["viewport_options"] = {"imagePlane": image_plane} with lib.maintained_time(): # Force viewer to False in call to capture because we have our own @@ -116,11 +123,8 @@ class ExtractThumbnail(openpype.api.Extractor): path = capture.capture(**preset) playblast = self._fix_playblast_output_path(path) - image_plane = instance.data.get("imagePlane") - if image_plane: - _, thumbnail = os.path.split(playblast) - else: - return + _, thumbnail = os.path.split(playblast) + self.log.info("file list {}".format(thumbnail)) From cd87b8ba2a5ae9a008123f999e958fe7c1562b54 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 26 Jul 2022 17:11:07 +0800 Subject: [PATCH 529/785] bugfix-maya-review-image-plane_attribute --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 47e9a907a0..0d537810c0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -101,7 +101,7 @@ class ExtractThumbnail(openpype.api.Extractor): if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] - #Show or Hide Image Plane + # Show or Hide Image Plane image_plane = instance.data.get("imagePlane", True) if "viewport_options" in preset: preset["viewport_options"]["imagePlane"] = image_plane From 642d6ef407630ef2a9dad37551b6725569a7b4d7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:15:48 +0200 Subject: [PATCH 530/785] fix typo --- openpype/plugins/publish/extract_thumbnail.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index c1eee71376..c154275322 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -72,7 +72,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # - this is to avoid "override" of source file dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") self.log.debug( - "Create temp directory {} for thumbnail".formap(dst_staging) + "Create temp directory {} for thumbnail".format(dst_staging) ) # Store new staging to cleanup paths instance.context.data["cleanupFullPaths"].append(dst_staging) From 2823cc2d1545adebca84c32aae9fb1e6f83db9d7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:40:02 +0200 Subject: [PATCH 531/785] removed unused git progress --- openpype/lib/git_progress.py | 86 ------------------------------------ 1 file changed, 86 deletions(-) delete mode 100644 openpype/lib/git_progress.py diff --git a/openpype/lib/git_progress.py b/openpype/lib/git_progress.py deleted file mode 100644 index 331b7b6745..0000000000 --- a/openpype/lib/git_progress.py +++ /dev/null @@ -1,86 +0,0 @@ -import git -from tqdm import tqdm - - -class _GitProgress(git.remote.RemoteProgress): - """ Class handling displaying progress during git operations. - - This is using **tqdm** for showing progress bars. 
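[Editorial sketch] The image-plane toggle added to the capture preset in PATCH 528 can also be expressed with dict.setdefault instead of the explicit if/else. A sketch under the assumption that preset is the capture preset dict and instance the processed pyblish instance; illustrative only, not part of any patch:

def apply_image_plane_option(preset, instance):
    """Mirror of the PATCH 528 logic using dict.setdefault."""
    # Default to showing image planes unless the instance disables them.
    image_plane = instance.data.get("imagePlane", True)
    preset.setdefault("viewport_options", {})["imagePlane"] = image_plane
    return preset
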
As **GitPython** - is parsing progress directly from git command, it is somehow unreliable - as in some operations it is difficult to get total count of iterations - to display meaningful progress bar. - - """ - _t = None - _code = 0 - _current_status = '' - _current_max = '' - - _description = { - 256: "Checking out files", - 4: "Counting objects", - 128: "Finding sources", - 32: "Receiving objects", - 64: "Resolving deltas", - 16: "Writing objects" - } - - def __init__(self): - super().__init__() - - def __del__(self): - if self._t is not None: - self._t.close() - - def _detroy_tqdm(self): - """ Used to close tqdm when operation ended. - - """ - if self._t is not None: - self._t.close() - self._t = None - - def _check_mask(self, opcode: int) -> bool: - """" Add meaningful description to **GitPython** opcodes. - - :param opcode: OP_MASK opcode - :type opcode: int - :return: String description of opcode - :rtype: str - - .. seealso:: For opcodes look at :class:`git.RemoteProgress` - - """ - if opcode & self.COUNTING: - return self._description.get(self.COUNTING) - elif opcode & self.CHECKING_OUT: - return self._description.get(self.CHECKING_OUT) - elif opcode & self.WRITING: - return self._description.get(self.WRITING) - elif opcode & self.RECEIVING: - return self._description.get(self.RECEIVING) - elif opcode & self.RESOLVING: - return self._description.get(self.RESOLVING) - elif opcode & self.FINDING_SOURCES: - return self._description.get(self.FINDING_SOURCES) - else: - return "Processing" - - def update(self, op_code, cur_count, max_count=None, message=''): - """ Called when git operation update progress. - - .. seealso:: For more details see - :func:`git.objects.submodule.base.Submodule.update` - `Documentation `_ - - """ - code = self._check_mask(op_code) - if self._current_status != code or self._current_max != max_count: - self._current_max = max_count - self._current_status = code - self._detroy_tqdm() - self._t = tqdm(total=max_count) - self._t.set_description(" . {}".format(code)) - - self._t.update(cur_count) From 297aaa6ee1a265119783b6f9355054d443e3af27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:41:15 +0200 Subject: [PATCH 532/785] removed unused function 'timeit' from log lib --- openpype/lib/__init__.py | 4 +--- openpype/lib/log.py | 22 ---------------------- 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index fb52a9aca7..c2fa9f0acb 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -63,7 +63,7 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit +from .log import PypeLogger from .path_templates import ( merge_dict, @@ -375,8 +375,6 @@ __all__ = [ "validate_mongo_connection", "OpenPypeMongoConnection", - "timeit", - "is_overlapping_otio_ranges", "otio_range_with_handles", "convert_to_padded_path", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 2cdb7ec8e4..33d3f5c409 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -483,25 +483,3 @@ class PypeLogger: cls.initialize() return OpenPypeMongoConnection.get_mongo_client() - - -def timeit(method): - """Print time in function. - - For debugging. 
- - """ - log = logging.getLogger() - - def timed(*args, **kw): - ts = time.time() - result = method(*args, **kw) - te = time.time() - if 'log_time' in kw: - name = kw.get('log_name', method.__name__.upper()) - kw['log_time'][name] = int((te - ts) * 1000) - else: - log.debug('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) - print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) - return result - return timed From 31a3911d4e0e756d5a3d62e957f771cd3a77aece Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:48:24 +0200 Subject: [PATCH 533/785] move functions from openpype.lib.config to openpype.lib.dateutils --- openpype/lib/__init__.py | 3 +- openpype/lib/config.py | 103 ++++++------------ openpype/lib/dateutils.py | 95 ++++++++++++++++ .../event_handlers_user/action_delivery.py | 4 +- openpype/plugins/load/delivery.py | 4 +- .../plugins/publish/collect_datetime_data.py | 4 +- 6 files changed, 134 insertions(+), 79 deletions(-) create mode 100644 openpype/lib/dateutils.py diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index c2fa9f0acb..2d99efbe28 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -83,8 +83,9 @@ from .anatomy import ( Anatomy ) -from .config import ( +from .dateutils import ( get_datetime_data, + get_timestamp, get_formatted_current_time ) diff --git a/openpype/lib/config.py b/openpype/lib/config.py index 57e8efa57d..26822649e4 100644 --- a/openpype/lib/config.py +++ b/openpype/lib/config.py @@ -1,82 +1,41 @@ -# -*- coding: utf-8 -*- -"""Get configuration data.""" -import datetime +import warnings +import functools -def get_datetime_data(datetime_obj=None): - """Returns current datetime data as dictionary. +class ConfigDeprecatedWarning(DeprecationWarning): + pass - Args: - datetime_obj (datetime): Specific datetime object - Returns: - dict: prepared date & time data +def deprecated(func): + """Mark functions as deprecated. - Available keys: - "d" - in shortest possible way. - "dd" - with 2 digits. - "ddd" - shortened week day. e.g.: `Mon`, ... - "dddd" - full name of week day. e.g.: `Monday`, ... - "m" - in shortest possible way. e.g.: `1` if January - "mm" - with 2 digits. - "mmm" - shortened month name. e.g.: `Jan`, ... - "mmmm" - full month name. e.g.: `January`, ... - "yy" - shortened year. e.g.: `19`, `20`, ... - "yyyy" - full year. e.g.: `2019`, `2020`, ... - "H" - shortened hours. - "HH" - with 2 digits. - "h" - shortened hours. - "hh" - with 2 digits. - "ht" - AM or PM. - "M" - shortened minutes. - "MM" - with 2 digits. - "S" - shortened seconds. - "SS" - with 2 digits. + It will result in a warning being emitted when the function is used. 
""" - if not datetime_obj: - datetime_obj = datetime.datetime.now() - - year = datetime_obj.strftime("%Y") - - month = datetime_obj.strftime("%m") - month_name_full = datetime_obj.strftime("%B") - month_name_short = datetime_obj.strftime("%b") - day = datetime_obj.strftime("%d") - - weekday_full = datetime_obj.strftime("%A") - weekday_short = datetime_obj.strftime("%a") - - hours = datetime_obj.strftime("%H") - hours_midday = datetime_obj.strftime("%I") - hour_midday_type = datetime_obj.strftime("%p") - minutes = datetime_obj.strftime("%M") - seconds = datetime_obj.strftime("%S") - - return { - "d": str(int(day)), - "dd": str(day), - "ddd": weekday_short, - "dddd": weekday_full, - "m": str(int(month)), - "mm": str(month), - "mmm": month_name_short, - "mmmm": month_name_full, - "yy": str(year[2:]), - "yyyy": str(year), - "H": str(int(hours)), - "HH": str(hours), - "h": str(int(hours_midday)), - "hh": str(hours_midday), - "ht": hour_midday_type, - "M": str(int(minutes)), - "MM": str(minutes), - "S": str(int(seconds)), - "SS": str(seconds), - } + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", ConfigDeprecatedWarning) + warnings.warn( + ( + "Deprecated import of function '{}'." + " Class was moved to 'openpype.lib.dateutils.{}'." + " Please change your imports." + ).format(func.__name__), + category=ConfigDeprecatedWarning + ) + return func(*args, **kwargs) + return new_func +@deprecated +def get_datetime_data(datetime_obj=None): + from .dateutils import get_datetime_data + + return get_datetime_data(datetime_obj) + + +@deprecated def get_formatted_current_time(): - return datetime.datetime.now().strftime( - "%Y%m%dT%H%M%SZ" - ) + from .dateutils import get_formatted_current_time + + return get_formatted_current_time() diff --git a/openpype/lib/dateutils.py b/openpype/lib/dateutils.py new file mode 100644 index 0000000000..68cd1d1c5b --- /dev/null +++ b/openpype/lib/dateutils.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +"""Get configuration data.""" +import datetime + + +def get_datetime_data(datetime_obj=None): + """Returns current datetime data as dictionary. + + Args: + datetime_obj (datetime): Specific datetime object + + Returns: + dict: prepared date & time data + + Available keys: + "d" - in shortest possible way. + "dd" - with 2 digits. + "ddd" - shortened week day. e.g.: `Mon`, ... + "dddd" - full name of week day. e.g.: `Monday`, ... + "m" - in shortest possible way. e.g.: `1` if January + "mm" - with 2 digits. + "mmm" - shortened month name. e.g.: `Jan`, ... + "mmmm" - full month name. e.g.: `January`, ... + "yy" - shortened year. e.g.: `19`, `20`, ... + "yyyy" - full year. e.g.: `2019`, `2020`, ... + "H" - shortened hours. + "HH" - with 2 digits. + "h" - shortened hours. + "hh" - with 2 digits. + "ht" - AM or PM. + "M" - shortened minutes. + "MM" - with 2 digits. + "S" - shortened seconds. + "SS" - with 2 digits. 
+ """ + + if not datetime_obj: + datetime_obj = datetime.datetime.now() + + year = datetime_obj.strftime("%Y") + + month = datetime_obj.strftime("%m") + month_name_full = datetime_obj.strftime("%B") + month_name_short = datetime_obj.strftime("%b") + day = datetime_obj.strftime("%d") + + weekday_full = datetime_obj.strftime("%A") + weekday_short = datetime_obj.strftime("%a") + + hours = datetime_obj.strftime("%H") + hours_midday = datetime_obj.strftime("%I") + hour_midday_type = datetime_obj.strftime("%p") + minutes = datetime_obj.strftime("%M") + seconds = datetime_obj.strftime("%S") + + return { + "d": str(int(day)), + "dd": str(day), + "ddd": weekday_short, + "dddd": weekday_full, + "m": str(int(month)), + "mm": str(month), + "mmm": month_name_short, + "mmmm": month_name_full, + "yy": str(year[2:]), + "yyyy": str(year), + "H": str(int(hours)), + "HH": str(hours), + "h": str(int(hours_midday)), + "hh": str(hours_midday), + "ht": hour_midday_type, + "M": str(int(minutes)), + "MM": str(minutes), + "S": str(int(seconds)), + "SS": str(seconds), + } + + +def get_timestamp(datetime_obj=None): + """Get standardized timestamp from datetime object. + + Args: + datetime_obj (datetime.datetime): Object of datetime. Current time + is used if not passed. + """ + + if datetime_obj is None: + datetime_obj = datetime.datetime.now() + return datetime_obj.strftime( + "%Y%m%dT%H%M%SZ" + ) + + +def get_formatted_current_time(): + return get_timestamp() diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index ad82af39a3..eec245070c 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -16,7 +16,7 @@ from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype_modules.ftrack.lib.custom_attributes import ( query_custom_attributes ) -from openpype.lib import config +from openpype.lib.dateutils import get_datetime_data from openpype.lib.delivery import ( path_from_representation, get_format_dict, @@ -555,7 +555,7 @@ class Delivery(BaseAction): format_dict = get_format_dict(anatomy, location_path) - datetime_data = config.get_datetime_data() + datetime_data = get_datetime_data() for repre in repres_to_deliver: source_path = repre.get("data", {}).get("path") debug_msg = "Processing representation {}".format(repre["_id"]) diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 7585ea4c59..f6e1d4f06b 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -4,10 +4,10 @@ from collections import defaultdict from Qt import QtWidgets, QtCore, QtGui from openpype.client import get_representations -from openpype.lib import config from openpype.pipeline import load, Anatomy from openpype import resources, style +from openpype.lib.dateutils import get_datetime_data from openpype.lib.delivery import ( sizeof_fmt, path_from_representation, @@ -160,7 +160,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): selected_repres = self._get_selected_repres() - datetime_data = config.get_datetime_data() + datetime_data = get_datetime_data() template_name = self.dropdown.currentText() format_dict = get_format_dict(self.anatomy, self.root_line_edit.text()) for repre in self._representations: diff --git a/openpype/plugins/publish/collect_datetime_data.py b/openpype/plugins/publish/collect_datetime_data.py index 1675ae1a98..f46d616fb3 100644 --- 
a/openpype/plugins/publish/collect_datetime_data.py +++ b/openpype/plugins/publish/collect_datetime_data.py @@ -5,7 +5,7 @@ Provides: """ import pyblish.api -from openpype.api import config +from openpype.lib.dateutils import get_datetime_data class CollectDateTimeData(pyblish.api.ContextPlugin): @@ -15,4 +15,4 @@ class CollectDateTimeData(pyblish.api.ContextPlugin): def process(self, context): key = "datetimeData" if key not in context.data: - context.data[key] = config.get_datetime_data() + context.data[key] = get_datetime_data() From 09001afa223baacd6748e0a44a6823199a289300 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:57:36 +0200 Subject: [PATCH 534/785] reduced 'Pype' from class names in logger --- openpype/lib/log.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 33d3f5c409..aaacb7b004 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -41,13 +41,13 @@ except ImportError: USE_UNICODE = hasattr(__builtins__, "unicode") -class PypeStreamHandler(logging.StreamHandler): +class LogStreamHandler(logging.StreamHandler): """ StreamHandler class designed to handle utf errors in python 2.x hosts. """ def __init__(self, stream=None): - super(PypeStreamHandler, self).__init__(stream) + super(LogStreamHandler, self).__init__(stream) self.enabled = True def enable(self): @@ -56,7 +56,6 @@ class PypeStreamHandler(logging.StreamHandler): Used to silence output """ self.enabled = True - pass def disable(self): """ Disable StreamHandler @@ -107,13 +106,13 @@ class PypeStreamHandler(logging.StreamHandler): self.handleError(record) -class PypeFormatter(logging.Formatter): +class LogFormatter(logging.Formatter): DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ]' default_formatter = logging.Formatter(DFT) def __init__(self, formats): - super(PypeFormatter, self).__init__() + super(LogFormatter, self).__init__() self.formatters = {} for loglevel in formats: self.formatters[loglevel] = logging.Formatter(formats[loglevel]) @@ -141,7 +140,7 @@ class PypeFormatter(logging.Formatter): return out -class PypeMongoFormatter(logging.Formatter): +class MongoFormatter(logging.Formatter): DEFAULT_PROPERTIES = logging.LogRecord( '', '', '', '', '', '', '', '').__dict__.keys() @@ -239,7 +238,7 @@ class PypeLogger: for handler in logger.handlers: if isinstance(handler, MongoHandler): add_mongo_handler = False - elif isinstance(handler, PypeStreamHandler): + elif isinstance(handler, LogStreamHandler): add_console_handler = False if add_console_handler: @@ -292,7 +291,7 @@ class PypeLogger: "username": components["username"], "password": components["password"], "capped": True, - "formatter": PypeMongoFormatter() + "formatter": MongoFormatter() } if components["port"] is not None: kwargs["port"] = int(components["port"]) @@ -303,10 +302,10 @@ class PypeLogger: @classmethod def _get_console_handler(cls): - formatter = PypeFormatter(cls.FORMAT_FILE) - console_handler = PypeStreamHandler() + formatter = LogFormatter(cls.FORMAT_FILE) + console_handler = LogStreamHandler() - console_handler.set_name("PypeStreamHandler") + console_handler.set_name("LogStreamHandler") console_handler.setFormatter(formatter) return console_handler @@ -417,9 +416,9 @@ class PypeLogger: def get_process_name(cls): """Process name that is like "label" of a process. - Pype's logging can be used from pype itseld of from hosts. Even in Pype - it's good to know if logs are from Pype tray or from pype's event - server. 
This should help to identify that information. + OpenPype's logging can be used from OpenPyppe itself of from hosts. + Even in OpenPype process it's good to know if logs are from tray or + from other cli commands. This should help to identify that information. """ if cls._process_name is not None: return cls._process_name From 14224407261d89b19424b4ac3c6608b10796cb01 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 11:58:35 +0200 Subject: [PATCH 535/785] make main class 'Logger' and keep 'PypeLogger' with commented deprecation log --- openpype/api.py | 3 +-- openpype/lib/__init__.py | 7 ++++++- openpype/lib/log.py | 16 ++++++++++++++-- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/api.py b/openpype/api.py index fac2ae572b..c2227c1a52 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -9,6 +9,7 @@ from .settings import ( ) from .lib import ( PypeLogger, + Logger, Anatomy, config, execute, @@ -58,8 +59,6 @@ from .action import ( RepairContextAction ) -# for backward compatibility with Pype 2 -Logger = PypeLogger __all__ = [ "get_system_settings", diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 2d99efbe28..31cd5e7510 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -63,7 +63,10 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger +from .log import ( + Logger, + PypeLogger, +) from .path_templates import ( merge_dict, @@ -371,7 +374,9 @@ __all__ = [ "get_datetime_data", "get_formatted_current_time", + "Logger", "PypeLogger", + "get_default_components", "validate_mongo_connection", "OpenPypeMongoConnection", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index aaacb7b004..dc030a6430 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -160,7 +160,7 @@ class MongoFormatter(logging.Formatter): 'method': record.funcName, 'lineNumber': record.lineno } - document.update(PypeLogger.get_process_data()) + document.update(Logger.get_process_data()) # Standard document decorated with exception info if record.exc_info is not None: @@ -180,7 +180,7 @@ class MongoFormatter(logging.Formatter): return document -class PypeLogger: +class Logger: DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ] ' DBG = " - { %(name)s }: [ %(message)s ] " INF = ">>> [ %(message)s ] " @@ -482,3 +482,15 @@ class PypeLogger: cls.initialize() return OpenPypeMongoConnection.get_mongo_client() + + +class PypeLogger(Logger): + @classmethod + def get_logger(cls, *args, **kwargs): + logger = Logger.get_logger(*args, **kwargs) + # TODO uncomment when replaced most of places + # logger.warning(( + # "'openpype.lib.PypeLogger' is deprecated class." + # " Please use 'openpype.lib.Logger' instead." 
+ # )) + return logger From 2657ff27f186bdcf8098f8f7878947fc36bec1f5 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 13:32:00 +0300 Subject: [PATCH 536/785] Replace deprecated functions --- openpype/hosts/maya/api/lib_rendersettings.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 38f493a4a8..6f41a5d169 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -7,11 +7,11 @@ import sys from openpype.api import ( get_project_settings, - get_asset) + ) from openpype.pipeline import legacy_io from openpype.pipeline import CreatorError - +from openpype.pipeline.context_tools import get_current_project_asset class RenderSettings(object): @@ -66,7 +66,7 @@ class RenderSettings(object): renderer = cmds.getAttr( 'defaultRenderGlobals.currentRenderer').lower() - asset_doc = get_asset() + asset_doc = get_current_project_asset() # project_settings/maya/create/CreateRender/aov_separator try: aov_separator = self._aov_chars[( From c7bf29d17cdb1c5ceea21dc3e104427290cf71a3 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 13:33:17 +0300 Subject: [PATCH 537/785] Style fixes --- openpype/hosts/maya/api/lib_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 6f41a5d169..0668c242f0 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -7,12 +7,13 @@ import sys from openpype.api import ( get_project_settings, - ) +) from openpype.pipeline import legacy_io from openpype.pipeline import CreatorError from openpype.pipeline.context_tools import get_current_project_asset + class RenderSettings(object): _image_prefix_nodes = { From a39eef07f4a91fe775d8c492fc0dbbf9502f4c2f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 13:52:52 +0300 Subject: [PATCH 538/785] Fix frame range reset. --- openpype/hosts/maya/api/lib_rendersettings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 0668c242f0..ee61f954e0 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -7,11 +7,13 @@ import sys from openpype.api import ( get_project_settings, + ) from openpype.pipeline import legacy_io from openpype.pipeline import CreatorError from openpype.pipeline.context_tools import get_current_project_asset +from openpype.hosts.maya.api.commands import reset_frame_range class RenderSettings(object): @@ -152,6 +154,7 @@ class RenderSettings(object): cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa elif (cmds.setAttr(str(attribute), type=True)) == "string": cmds.setAttr(str(attribute), str(value), type = "string") # noqa + reset_frame_range() def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" From 6e77634c67f39ce22d06068bc5110c2cae46686f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 14:05:57 +0300 Subject: [PATCH 539/785] Fix attribute type check bug. 
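A minimal sketch of the attribute-setting pattern the two fixes above converge on: the attribute type has to be queried with getAttr (setAttr cannot query), and the set call is then dispatched on that type. The helper name and the exact bool/string handling are illustrative rather than lifted from the patch, and this only runs inside a Maya session:

    from maya import cmds

    def set_render_attr(attribute, value):
        # getAttr with type=True returns the attribute type name,
        # e.g. "long", "bool" or "string".
        attr_type = cmds.getAttr(attribute, type=True)
        if attr_type in ("long", "bool"):
            # integer-like and boolean attributes accept plain ints
            cmds.setAttr(attribute, int(value))
        elif attr_type == "string":
            # string attributes need an explicit type flag
            cmds.setAttr(attribute, str(value), type="string")
        else:
            cmds.setAttr(attribute, value)

    # e.g. set_render_attr("defaultRenderGlobals.imageFilePrefix", "maya/scene")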
--- openpype/hosts/maya/api/lib_rendersettings.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index ee61f954e0..c3bccf0add 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -148,11 +148,11 @@ class RenderSettings(object): # command accordingly. for item in additional_options: attribute, value = item - if (cmds.setAttr(str(attribute), type=True)) == "long": + if (cmds.getAttr(str(attribute), type=True)) == "long": cmds.setAttr(str(attribute), int(value)) - elif (cmds.setAttr(str(attribute), type=True)) == "bool": + elif (cmds.getAttr(str(attribute), type=True)) == "bool": cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa - elif (cmds.setAttr(str(attribute), type=True)) == "string": + elif (cmds.getAttr(str(attribute), type=True)) == "string": cmds.setAttr(str(attribute), str(value), type = "string") # noqa reset_frame_range() From 5e9799ee1649c5686fd2987b54331c6b1ea14b57 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 26 Jul 2022 20:04:08 +0800 Subject: [PATCH 540/785] Enable write color sets on animation publish automatically --- openpype/hosts/maya/plugins/create/create_animation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 5cd1f7090a..ef6608054d 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -22,7 +22,7 @@ class CreateAnimation(plugin.Creator): self.data[key] = value # Write vertex colors with the geometry. - self.data["writeColorSets"] = False + self.data["writeColorSets"] = True self.data["writeFaceSets"] = False # Include only renderable visible shapes. From 9377d20be1f10c41f49e303062485d7a8f6af85d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 16:12:12 +0200 Subject: [PATCH 541/785] implemented functions to extract template data --- openpype/pipeline/template_data.py | 226 +++++++++++++++++++++++++++++ 1 file changed, 226 insertions(+) create mode 100644 openpype/pipeline/template_data.py diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py new file mode 100644 index 0000000000..de46650f9d --- /dev/null +++ b/openpype/pipeline/template_data.py @@ -0,0 +1,226 @@ +from openpype.client import get_project, get_asset_by_name +from openpype.settings import get_system_settings +from openpype.lib.local_settings import get_openpype_username + + +def get_general_template_data(system_settings=None): + """General template data based on system settings or machine. + + Output contains formatting keys: + - 'studio[name]' - Studio name filled from system settings + - 'studio[code]' - Studio code filled from system settings + - 'user' - User's name using 'get_openpype_username' + + Args: + system_settings (Dict[str, Any]): System settings. + """ + + if not system_settings: + system_settings = get_system_settings() + studio_name = system_settings["general"]["studio_name"] + studio_code = system_settings["general"]["studio_code"] + return { + "studio": { + "name": studio_name, + "code": studio_code + }, + "user": get_openpype_username() + } + + +def get_project_template_data(project_doc): + """Extract data from project document that are used in templates. 
+ + Project document must have 'name' and (at this moment) optional + key 'data.code'. + + Output contains formatting keys: + - 'project[name]' - Project name + - 'project[code]' - Project code + + Args: + project_doc (Dict[str, Any]): Queried project document. + + Returns: + Dict[str, Dict[str, str]]: Template data based on project document. + """ + + project_code = project_doc.get("data", {}).get("code") + return { + "project": { + "name": project_doc["name"], + "code": project_code + } + } + + +def get_asset_template_data(asset_doc, project_name): + """Extract data from asset document that are used in templates. + + Output dictionary contains keys: + - 'asset' - asset name + - 'hierarchy' - parent asset names joined with '/' + - 'parent' - direct parent name, project name used if is under project + + Required document fields: + Asset: 'name', 'data.parents' + + Args: + asset_doc (Dict[str, Any]): Queried asset document. + project_name (str): Is used for 'parent' key if asset doc does not have + any. + + Returns: + Dict[str, str]: Data that are based on asset document and can be used + in templates. + """ + + asset_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(asset_parents) + if asset_parents: + parent_name = asset_parents[-1] + else: + parent_name = project_name + + return { + "asset": asset_doc["name"], + "hierarchy": hierarchy, + "parent": parent_name + } + + +def get_task_type(asset_doc, task_name): + """Get task type based on asset document and task name. + + Required document fields: + Asset: 'data.tasks' + + Args: + asset_doc (Dict[str, Any]): Queried asset document. + task_name (str): Task name which is under asset. + + Returns: + str: Task type name. + None: Task was not found on asset document. + """ + + asset_tasks_info = asset_doc["data"]["tasks"] + return asset_tasks_info.get(task_name, {}).get("type") + + +def get_task_template_data(project_doc, asset_doc, task_name): + """"Extract task specific data from project and asset documents. + + Required document fields: + Project: 'config.tasks' + Asset: 'data.tasks'. + + Args: + project_doc (Dict[str, Any]): Queried project document. + asset_doc (Dict[str, Any]): Queried asset document. + tas_name (str): Name of task for which data should be returned. + + Returns: + Dict[str, Dict[str, str]]: Template data + """ + + project_task_types = project_doc["config"]["tasks"] + task_type = get_task_type(asset_doc, task_name) + task_code = project_task_types.get(task_type, {}).get("short_name") + + return { + "task": { + "name": task_name, + "type": task_type, + "short": task_code, + } + } + + +def get_template_data( + project_doc, + asset_doc=None, + task_name=None, + host_name=None, + system_settings=None +): + """Prepare data for templates filling from entered documents and info. + + This function does not "auto fill" any values except system settings and + it's on purpose. + + Universal function to receive template data from passed arguments. Only + required argument is project document all other arguments are optional + and their values won't be added to template data if are not passed. + + Required document fields: + Project: 'name', 'data.code', 'config.tasks' + Asset: 'name', 'data.parents', 'data.tasks' + + Args: + project_doc (Dict[str, Any]): Mongo document of project from MongoDB. + asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. + task_name (Union[str, None]): Task name under passed asset. + host_name (Union[str, None]): Used to fill '{app}' key. 
+ system_settings (Union[Dict, None]): Prepared system settings. + They're queried if not passed (may be slower). + + Returns: + Dict[str, Any]: Data prepared for filling workdir template. + """ + + template_data = get_general_template_data(system_settings) + template_data.update(get_project_template_data(project_doc)) + if asset_doc: + template_data.update(get_asset_template_data( + asset_doc, project_doc["name"] + )) + if task_name: + template_data.update(get_task_template_data( + project_doc, asset_doc, task_name + )) + + if host_name: + template_data["app"] = host_name + + return template_data + + +def get_template_data_with_names( + project_name, + asset_name=None, + task_name=None, + host_name=None, + system_settings=None +): + """Prepare data for templates filling from entered entity names and info. + + Copy of 'get_template_data' but based on entity names instead of documents. + Only difference is that documents are queried. + + Args: + project_name (str): Project name for which template data are + calculated. + asset_name (Union[str, None]): Asset name for which template data are + calculated. + task_name (Union[str, None]): Task name under passed asset. + host_name (Union[str, None]):Used to fill '{app}' key. + because workdir template may contain `{app}` key. + system_settings (Union[Dict, None]): Prepared system settings. + They're queried if not passed. + + Returns: + Dict[str, Any]: Data prepared for filling workdir template. + """ + + project_doc = get_project(project_name, fields=["name", "data.code"]) + asset_doc = None + if asset_name: + asset_doc = get_asset_by_name( + project_name, + asset_name, + fields=["name", "data.parents", "data.tasks"] + ) + return get_template_data( + project_doc, asset_doc, task_name, host_name, system_settings + ) From a26fd8394c71f0f01552f20987ac6618747d1572 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 17:32:26 +0300 Subject: [PATCH 542/785] Propagate render settings key to grey out apply button. 
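A short usage sketch of the template-data helpers introduced in the previous patch (541). It assumes a configured OpenPype environment with a reachable MongoDB; the project, asset and task names below are placeholders:

    from openpype.pipeline.template_data import get_template_data_with_names

    # Documents are queried internally, so this is the convenient (slower)
    # variant of get_template_data(), which takes already-queried documents.
    data = get_template_data_with_names(
        project_name="MyProject",
        asset_name="sh010",
        task_name="compositing",
        host_name="nuke",
    )

    # Typical keys used for anatomy/template formatting:
    #   data["project"]["name"], data["project"]["code"],
    #   data["asset"], data["hierarchy"], data["parent"],
    #   data["task"]["name"], data["task"]["type"], data["task"]["short"],
    #   data["user"], data["studio"]["name"], data["app"]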
--- openpype/hosts/maya/api/menu.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c3ce8b0227..7d2d0dc3f5 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,7 +6,7 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile +from openpype.api import BuildWorkfile, get_current_project_settings from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools @@ -98,12 +98,18 @@ def install(): ) cmds.menuItem(divider=True) - - cmds.menuItem( - "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa - ) - + # project_settings/maya/RenderSettings/apply_render_settings + render_settings_flag = get_current_project_settings()["maya"]["RenderSettings"]["apply_render_settings"] # noqa + if render_settings_flag: + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings(), # noqa + enable=True) + else: + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings(), # noqa + enable=False) cmds.menuItem(divider=True) cmds.menuItem( From 58309c3d3b970ea5f55a08e6b1b1c092b3d6413a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 17:38:58 +0300 Subject: [PATCH 543/785] Remove Mental Ray related code. --- openpype/hosts/maya/api/lib_rendersettings.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index c3bccf0add..768f9156c3 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -19,7 +19,6 @@ from openpype.hosts.maya.api.commands import reset_frame_range class RenderSettings(object): _image_prefix_nodes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', 'renderman': 'defaultRenderGlobals.imageFilePrefix', @@ -27,7 +26,6 @@ class RenderSettings(object): } _image_prefixes = { - 'mentalray': 'maya///{aov_separator}', # noqa 'vray': 'maya///', 'arnold': 'maya///{aov_separator}', # noqa 'renderman': 'maya///{aov_separator}', From 2a3255a9cb6a5eed64c906cd28cfdb2e6679d83b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 16:40:35 +0200 Subject: [PATCH 544/785] added function which calculate template data based on context session --- openpype/pipeline/context_tools.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index a8e55479b6..0535ce5d54 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -19,7 +19,9 @@ from openpype.client import ( from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings from openpype.lib import filter_pyblish_plugins + from .anatomy import Anatomy +from .template_data import get_template_data_with_names from . 
import ( legacy_io, register_loader_plugin_path, @@ -336,6 +338,7 @@ def get_current_project_asset(asset_name=None, asset_id=None, fields=None): return None return get_asset_by_name(project_name, asset_name, fields=fields) + def is_representation_from_latest(representation): """Return whether the representation is from latest version @@ -348,3 +351,29 @@ def is_representation_from_latest(representation): project_name = legacy_io.active_project() return version_is_latest(project_name, representation["parent"]) + + +def get_template_data_from_session(session=None, system_settings=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + system_settings (Union[Dict[str, Any], Any]): Prepared system settings. + Optional are auto received if not passed. + + Returns: + Dict[str, Any]: All available data from session. + """ + + if session is None: + session = legacy_io.Session + + project_name = session["AVALON_PROJECT"] + asset_name = session["AVALON_ASSET"] + task_name = session["AVALON_TASK"] + host_name = session["AVALON_APP"] + + return get_template_data_with_names( + project_name, asset_name, task_name, host_name, system_settings + ) From 5c6b47e503b78e841a173575f222b89d49b5c1f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 16:47:11 +0200 Subject: [PATCH 545/785] mark functions in lib as deprecated and re-use functions from openpype.pipeline --- openpype/lib/avalon_context.py | 80 +++++++++------------------------- 1 file changed, 20 insertions(+), 60 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 4076a91c36..73014f5a5d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -21,14 +21,10 @@ from openpype.client import ( get_representations, get_workfile_info, ) -from openpype.settings import ( - get_project_settings, - get_system_settings -) +from openpype.settings import get_project_settings from .profiles_filtering import filter_profiles from .events import emit_event from .path_templates import StringTemplate -from .local_settings import get_openpype_username legacy_io = None @@ -222,17 +218,11 @@ def get_asset(asset_name=None): return get_current_project_asset(asset_name=asset_name) +@deprecated("openpype.pipeline.template_data.get_general_template_data") def get_system_general_anatomy_data(system_settings=None): - if not system_settings: - system_settings = get_system_settings() - studio_name = system_settings["general"]["studio_name"] - studio_code = system_settings["general"]["studio_code"] - return { - "studio": { - "name": studio_name, - "code": studio_code - } - } + from openpype.pipeline.template_data import get_general_template_data + + return get_general_template_data(system_settings) def get_linked_asset_ids(asset_doc): @@ -424,7 +414,7 @@ def get_workfile_template_key( return default -# TODO rename function as is not just "work" specific +@deprecated("openpype.pipeline.template_data.get_template_data") def get_workdir_data(project_doc, asset_doc, task_name, host_name): """Prepare data for workdir template filling from entered information. @@ -437,40 +427,14 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): Returns: dict: Data prepared for filling workdir template. 
+ """ - task_type = asset_doc['data']['tasks'].get(task_name, {}).get('type') - project_task_types = project_doc["config"]["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") + from openpype.pipeline.template_data import get_template_data - asset_parents = asset_doc["data"]["parents"] - hierarchy = "/".join(asset_parents) - - parent_name = project_doc["name"] - if asset_parents: - parent_name = asset_parents[-1] - - data = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "task": { - "name": task_name, - "type": task_type, - "short": task_code, - }, - "asset": asset_doc["name"], - "parent": parent_name, - "app": host_name, - "user": get_openpype_username(), - "hierarchy": hierarchy, - } - - system_general_data = get_system_general_anatomy_data() - data.update(system_general_data) - - return data + return get_template_data( + project_doc, asset_doc, task_name, host_name + ) def get_workdir_with_workdir_data( @@ -565,27 +529,21 @@ def get_workdir( ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_template_data_from_session") def template_data_from_session(session=None): """ Return dictionary with template from session keys. Args: session (dict, Optional): The Session to use. If not provided use the currently active global Session. + Returns: dict: All available data from session. + """ - if session is None: - session = legacy_io.Session - - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] - host_name = session["AVALON_APP"] - project_doc = get_project(project_name) - asset_doc = get_asset_by_name(project_name, asset_name) - return get_workdir_data(project_doc, asset_doc, task_name, host_name) + from openpype.pipeline.context_tools import get_template_data_from_session + return get_template_data_from_session(session) @with_pipeline_io @@ -660,13 +618,14 @@ def compute_session_changes( @with_pipeline_io def get_workdir_from_session(session=None, template_key=None): from openpype.pipeline import Anatomy + from openpype.pipeline.context_tools import get_template_data_from_session if session is None: session = legacy_io.Session project_name = session["AVALON_PROJECT"] host_name = session["AVALON_APP"] anatomy = Anatomy(project_name) - template_data = template_data_from_session(session) + template_data = get_template_data_from_session(session) anatomy_filled = anatomy.format(template_data) if not template_key: @@ -695,8 +654,8 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): Returns: dict: The changed key, values in the current Session. - """ + changes = compute_session_changes( legacy_io.Session, task=task, @@ -768,6 +727,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and `legacy_io` is used if not entered. """ + from openpype.pipeline import Anatomy # Use legacy_io if dbcon is not entered From 54bb85b2043bab1b9b1a0b5d8236d2c694c9a66f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 17:47:58 +0300 Subject: [PATCH 546/785] Remove unnecessary comment. 
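The migration path implied by the deprecations in patch 545 above, sketched for existing callers; the entity names are placeholders and a configured OpenPype database is assumed:

    # Before (deprecated, removal planned after release 3.14.*):
    #     from openpype.lib.avalon_context import get_workdir_data
    #     data = get_workdir_data(project_doc, asset_doc, task_name, host_name)

    # After: query documents with openpype.client and use the pipeline helper.
    from openpype.client import get_project, get_asset_by_name
    from openpype.pipeline.template_data import get_template_data

    project_name = "MyProject"
    project_doc = get_project(project_name)
    asset_doc = get_asset_by_name(project_name, "sh010")
    data = get_template_data(project_doc, asset_doc, "compositing", "maya")

    # Inside a running host the session-based shortcut does the same lookup:
    #     from openpype.pipeline.context_tools import get_template_data_from_session
    #     data = get_template_data_from_session()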
--- openpype/hosts/maya/api/menu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 7d2d0dc3f5..ed546ba7a8 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -98,7 +98,7 @@ def install(): ) cmds.menuItem(divider=True) - # project_settings/maya/RenderSettings/apply_render_settings + render_settings_flag = get_current_project_settings()["maya"]["RenderSettings"]["apply_render_settings"] # noqa if render_settings_flag: cmds.menuItem( From f120f22c71ce2590e191fcf58b4be9967b17f15c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 16:48:13 +0200 Subject: [PATCH 547/785] Added information about removement to docstrings of deprecated functions --- openpype/lib/avalon_context.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 73014f5a5d..521d1e05e1 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -184,6 +184,9 @@ def is_latest(representation): Returns: bool: Whether the representation is of latest version. + + Deprecated: + Function will be removed after release version 3.14.* """ from openpype.pipeline.context_tools import is_representation_from_latest @@ -193,7 +196,11 @@ def is_latest(representation): @deprecated("openpype.pipeline.load.any_outdated_containers") def any_outdated(): - """Return whether the current scene has any outdated content""" + """Return whether the current scene has any outdated content. + + Deprecated: + Function will be removed after release version 3.14.* + """ from openpype.pipeline.load import any_outdated_containers @@ -211,6 +218,9 @@ def get_asset(asset_name=None): Returns: (MongoDB document) + + Deprecated: + Function will be removed after release version 3.14.* """ from openpype.pipeline.context_tools import get_current_project_asset @@ -220,6 +230,10 @@ def get_asset(asset_name=None): @deprecated("openpype.pipeline.template_data.get_general_template_data") def get_system_general_anatomy_data(system_settings=None): + """ + Deprecated: + Function will be removed after release version 3.14.* + """ from openpype.pipeline.template_data import get_general_template_data return get_general_template_data(system_settings) @@ -287,7 +301,10 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): Returns: None: If asset, subset or version were not found. - dict: Last version document for entered . + dict: Last version document for entered. + + Deprecated: + Function will be removed after release version 3.14.* """ if not project_name: @@ -428,6 +445,8 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): Returns: dict: Data prepared for filling workdir template. + Deprecated: + Function will be removed after release version 3.14.* """ from openpype.pipeline.template_data import get_template_data @@ -540,6 +559,8 @@ def template_data_from_session(session=None): Returns: dict: All available data from session. 
+ Deprecated: + Function will be removed after release version 3.14.* """ from openpype.pipeline.context_tools import get_template_data_from_session From 3561454a5f83129629929f3c9b6d937654d3e787 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 16:48:41 +0200 Subject: [PATCH 548/785] removed unused imports --- openpype/lib/avalon_context.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 521d1e05e1..95c547ce34 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -13,10 +13,8 @@ from openpype.client import ( get_project, get_assets, get_asset_by_name, - get_subset_by_name, get_subsets, get_last_versions, - get_last_version_by_subset_id, get_last_version_by_subset_name, get_representations, get_workfile_info, From 8d7b9af7a52209fc706838abc83109724d5e4741 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 18:00:13 +0300 Subject: [PATCH 549/785] Grab image prefixes from settings. --- openpype/hosts/maya/api/lib_rendersettings.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 768f9156c3..e5acdc2139 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -7,7 +7,7 @@ import sys from openpype.api import ( get_project_settings, - + get_current_project_settings ) from openpype.pipeline import legacy_io @@ -26,10 +26,10 @@ class RenderSettings(object): } _image_prefixes = { - 'vray': 'maya///', - 'arnold': 'maya///{aov_separator}', # noqa + 'vray': get_current_project_settings()["maya"]["RenderSettings"]["vray_renderer"]["image_prefix"], # noqa + 'arnold': get_current_project_settings()["maya"]["RenderSettings"]["arnold_renderer"]["image_prefix"], # noqa 'renderman': 'maya///{aov_separator}', - 'redshift': 'maya///{aov_separator}' # noqa + 'redshift': get_current_project_settings()["maya"]["RenderSettings"]["redshift_renderer"]["image_prefix"] # noqa } _aov_chars = { From 9f9ac018bdc076f16fd7940b387445674f192277 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 17:23:12 +0200 Subject: [PATCH 550/785] use new functions instead of 'get_workdir_data' --- openpype/hosts/nuke/api/lib.py | 9 ++++---- .../tvpaint/plugins/load/load_workfile.py | 10 ++++----- .../unreal/hooks/pre_workfile_preparation.py | 13 ++++------- openpype/lib/applications.py | 10 ++++++--- openpype/lib/avalon_context.py | 9 +++++--- .../action_fill_workfile_attr.py | 13 +++++++---- openpype/tools/workfiles/save_as_dialog.py | 22 +++++-------------- 7 files changed, 39 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 74db164ae5..87647e214e 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -23,7 +23,6 @@ from openpype.api import ( Logger, BuildWorkfile, get_version_from_path, - get_workdir_data, get_current_project_settings, ) from openpype.tools.utils import host_tools @@ -34,6 +33,7 @@ from openpype.settings import ( get_anatomy_settings, ) from openpype.modules import ModulesManager +from openpype.pipeline.template_data import get_template_data_with_names from openpype.pipeline import ( discover_legacy_creator_plugins, legacy_io, @@ -965,12 +965,11 @@ def format_anatomy(data): data["version"] = get_version_from_path(file) project_name = anatomy.project_name - project_doc = get_project(project_name) - 
asset_doc = get_asset_by_name(project_name, data["avalon"]["asset"]) + asset_name = data["avalon"]["asset"] task_name = os.environ["AVALON_TASK"] host_name = os.environ["AVALON_APP"] - context_data = get_workdir_data( - project_doc, asset_doc, task_name, host_name + context_data = get_template_data_with_names( + project_name, asset_name, task_name, host_name ) data.update(context_data) data.update({ diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index c6dc765a27..8b09d20755 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,10 +1,8 @@ import os -from openpype.client import get_project, get_asset_by_name from openpype.lib import ( StringTemplate, get_workfile_template_key_from_context, - get_workdir_data, get_last_workfile_with_version, ) from openpype.pipeline import ( @@ -12,6 +10,7 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -54,9 +53,6 @@ class LoadWorkfile(plugin.Loader): asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] - project_doc = get_project(project_name) - asset_doc = get_asset_by_name(project_name, asset_name) - template_key = get_workfile_template_key_from_context( asset_name, task_name, @@ -66,7 +62,9 @@ class LoadWorkfile(plugin.Loader): ) anatomy = Anatomy(project_name) - data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data = get_template_data_with_names( + project_name, asset_name, task_name, host_name + ) data["root"] = anatomy.roots file_template = anatomy.templates[template_key]["file"] diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 5be04fc841..50b34bd573 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- """Hook to launch Unreal and prepare projects.""" import os +import copy from pathlib import Path from openpype.lib import ( PreLaunchHook, ApplicationLaunchFailed, ApplicationNotFound, - get_workdir_data, get_workfile_template_key ) import openpype.hosts.unreal.lib as unreal_lib @@ -35,18 +35,13 @@ class UnrealPrelaunchHook(PreLaunchHook): return last_workfile.name # Prepare data for fill data and for getting workfile template key - task_name = self.data["task_name"] anatomy = self.data["anatomy"] - asset_doc = self.data["asset_doc"] project_doc = self.data["project_doc"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") + # Use already prepared workdir data + workdir_data = copy.deepcopy(self.data["workdir_data"]) + task_type = workdir_data.get("task", {}).get("type") - workdir_data = get_workdir_data( - project_doc, asset_doc, task_name, self.host_name - ) # QUESTION raise exception if version is part of filename template? workdir_data["version"] = 1 workdir_data["ext"] = "uproject" diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index f46197e15f..da8623ea13 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -28,7 +28,6 @@ from . 
import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username from .avalon_context import ( - get_workdir_data, get_workdir_with_workdir_data, get_workfile_template_key, get_last_workfile @@ -1576,6 +1575,9 @@ def prepare_context_environments(data, env_group=None): data (EnvironmentPrepData): Dictionary where result and intermediate result will be stored. """ + + from openpype.pipeline.template_data import get_template_data + # Context environments log = data["log"] @@ -1596,7 +1598,9 @@ def prepare_context_environments(data, env_group=None): # Load project specific environments project_name = project_doc["name"] project_settings = get_project_settings(project_name) + system_settings = get_system_settings() data["project_settings"] = project_settings + data["system_settings"] = system_settings # Apply project specific environments on current env value apply_project_environments_value( project_name, data["env"], project_settings, env_group @@ -1619,8 +1623,8 @@ def prepare_context_environments(data, env_group=None): if not app.is_host: return - workdir_data = get_workdir_data( - project_doc, asset_doc, task_name, app.host_name + workdir_data = get_template_data( + project_doc, asset_doc, task_name, app.host_name, system_settings ) data["workdir_data"] = workdir_data diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 95c547ce34..42854f39d6 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -533,11 +533,13 @@ def get_workdir( TemplateResult: Workdir path. """ + from openpype.pipeline import Anatomy + from openpype.pipeline.template_data import get_template_data + if not anatomy: - from openpype.pipeline import Anatomy anatomy = Anatomy(project_doc["name"]) - workdir_data = get_workdir_data( + workdir_data = get_template_data( project_doc, asset_doc, task_name, host_name ) # Output is TemplateResult object which contain useful data @@ -748,6 +750,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """ from openpype.pipeline import Anatomy + from openpype.pipeline.template_data import get_template_data # Use legacy_io if dbcon is not entered if not dbcon: @@ -766,7 +769,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Prepare project for workdir data project_name = dbcon.active_project() project_doc = get_project(project_name) - workdir_data = get_workdir_data( + workdir_data = get_template_data( project_doc, asset_doc, task_name, dbcon.Session["AVALON_APP"] ) # Prepare anatomy diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index d91649d7ba..c7fa2dce5e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -11,13 +11,13 @@ from openpype.client import ( get_project, get_assets, ) -from openpype.settings import get_project_settings +from openpype.settings import get_project_settings, get_system_settings from openpype.lib import ( get_workfile_template_key, - get_workdir_data, StringTemplate, ) from openpype.pipeline import Anatomy +from openpype.pipeline.template_data import get_template_data from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks @@ -279,14 +279,19 @@ class FillWorkfileAttributeAction(BaseAction): 
extension = "{ext}" project_doc = get_project(project_name) project_settings = get_project_settings(project_name) + system_settings = get_system_settings() anatomy = Anatomy(project_name) templates_by_key = {} operations = [] for asset_doc, task_entities in asset_docs_with_task_entities: for task_entity in task_entities: - workfile_data = get_workdir_data( - project_doc, asset_doc, task_entity["name"], host_name + workfile_data = get_template_data( + project_doc, + asset_doc, + task_entity["name"], + host_name, + system_settings ) # Use version 1 for each workfile workfile_data["version"] = 1 diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index b62fd2c889..ea602846e7 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,18 +5,12 @@ import logging from Qt import QtWidgets, QtCore -from openpype.client import ( - get_project, - get_asset_by_name, -) -from openpype.lib import ( - get_last_workfile_with_version, - get_workdir_data, -) +from openpype.lib import get_last_workfile_with_version from openpype.pipeline import ( registered_host, legacy_io, ) +from openpype.pipeline.template_data import get_template_data_with_names from openpype.tools.utils import PlaceholderLineEdit log = logging.getLogger(__name__) @@ -30,16 +24,10 @@ def build_workfile_data(session): asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] host_name = session["AVALON_APP"] - project_doc = get_project( - project_name, fields=["name", "data.code", "config.tasks"] - ) - asset_doc = get_asset_by_name( - project_name, - asset_name, - fields=["name", "data.tasks", "data.parents"] - ) - data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data = get_template_data_with_names( + project_name, asset_name, task_name, host_name + ) data.update({ "version": 1, "comment": "", From c44ec02d5e1ff3a370fa03d3057f53663f791e3d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 26 Jul 2022 23:36:17 +0800 Subject: [PATCH 551/785] update the setting which allows switching on/off write color sets in animation publish --- .../maya/plugins/create/create_animation.py | 3 +- .../defaults/project_settings/maya.json | 2 ++ .../schemas/schema_maya_create.json | 29 ++++++++++++++++--- 3 files changed, 29 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index ef6608054d..b7f473acef 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -11,6 +11,7 @@ class CreateAnimation(plugin.Creator): label = "Animation" family = "animation" icon = "male" + write_color_sets = False def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -22,7 +23,7 @@ class CreateAnimation(plugin.Creator): self.data[key] = value # Write vertex colors with the geometry. - self.data["writeColorSets"] = True + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Include only renderable visible shapes. 
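The writeColorSets toggle mirrors a flag of the same name on Maya's Alembic exporter, which is presumably where the instance value ends up. A minimal sketch of such an export call; the frame range, root node and output path are placeholders, and the exact job string OpenPype builds may differ:

    from maya import cmds

    cmds.loadPlugin("AbcExport", quiet=True)
    job = (
        "-frameRange 1001 1100 "
        "-writeColorSets "      # emitted only when the instance enables it
        "-uvWrite -worldSpace "
        "-root |char_GRP "
        "-file /tmp/animationMain.abc"
    )
    cmds.AbcExport(j=job)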
diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index c96acbff6d..70bedf55d8 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -90,9 +90,11 @@ }, "CreateAnimation": { "enabled": true, + "write_color_sets": false, "defaults": [ "Main" ] + }, "CreateAss": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 09287a8b50..9000b0246f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -143,6 +143,31 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateAnimation", + "label": "Create Animation", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", @@ -159,10 +184,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAnimation", - "label": "Create Animation" - }, { "key": "CreateAss", "label": "Create Ass" From 8259be5a1ad3815e4a5eb3a39edf7c858dddff0a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 17:36:45 +0200 Subject: [PATCH 552/785] simplified collect anatomy context data --- .../publish/collect_anatomy_context_data.py | 66 ++++++------------- 1 file changed, 21 insertions(+), 45 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 0794adfb67..8433816908 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -15,10 +15,8 @@ Provides: import json import pyblish.api -from openpype.lib import ( - get_system_general_anatomy_data -) from openpype.pipeline import legacy_io +from openpype.pipeline.template_data import get_template_data class CollectAnatomyContextData(pyblish.api.ContextPlugin): @@ -33,11 +31,15 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): "asset": "AssetName", "hierarchy": "path/to/asset", "task": "Working", + "user": "MeDespicable", + # Duplicated entry "username": "MeDespicable", + # Current host name + "app": "maya" + *** OPTIONAL *** - "app": "maya" # Current application base name - + mutliple keys from `datetimeData` # see it's collector + + mutliple keys from `datetimeData` (See it's collector) } """ @@ -45,52 +47,26 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): label = "Collect Anatomy Context Data" def process(self, context): + host_name = context.data["hostName"] + system_settings = context.data["system_settings"] project_entity = context.data["projectEntity"] - context_data = { - "project": { - "name": project_entity["name"], - "code": project_entity["data"].get("code") - }, - "username": context.data["user"], - "app": context.data["hostName"] - } - - context.data["anatomyData"] = context_data - - # add system general settings anatomy data - system_general_data = get_system_general_anatomy_data() - 
context_data.update(system_general_data) - - datetime_data = context.data.get("datetimeData") or {} - context_data.update(datetime_data) - asset_entity = context.data.get("assetEntity") + task_name = None if asset_entity: task_name = legacy_io.Session["AVALON_TASK"] - asset_tasks = asset_entity["data"]["tasks"] - task_type = asset_tasks.get(task_name, {}).get("type") + anatomy_data = get_template_data( + project_entity, asset_entity, task_name, host_name, system_settings + ) + anatomy_data.update(context.data.get("datetimeData") or {}) - project_task_types = project_entity["config"]["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") + username = context.data["user"] + anatomy_data["user"] = username + # Backwards compatibility for 'username' key + anatomy_data["username"] = username - asset_parents = asset_entity["data"]["parents"] - hierarchy = "/".join(asset_parents) - - parent_name = project_entity["name"] - if asset_parents: - parent_name = asset_parents[-1] - - context_data.update({ - "asset": asset_entity["name"], - "parent": parent_name, - "hierarchy": hierarchy, - "task": { - "name": task_name, - "type": task_type, - "short": task_code, - } - }) + # Store + context.data["anatomyData"] = anatomy_data self.log.info("Global anatomy Data collected") - self.log.debug(json.dumps(context_data, indent=4)) + self.log.debug(json.dumps(anatomy_data, indent=4)) From 7aefc53d98fbc6509c5c90b4b86fd75d7a4344e6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Jul 2022 18:23:58 +0200 Subject: [PATCH 553/785] removed unnecessary "app" key filling --- openpype/hosts/nuke/api/lib.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 87647e214e..501ab4ba93 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -910,19 +910,17 @@ def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' avalon_knob_data = read_avalon_data(node) - data = {'avalon': avalon_knob_data} nuke_imageio_writes = get_imageio_node_setting( node_class=avalon_knob_data["family"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) - host_name = os.environ.get("AVALON_APP") - data.update({ - "app": host_name, + data = { + "avalon": avalon_knob_data, "nuke_imageio_writes": nuke_imageio_writes - }) + } anatomy_filled = format_anatomy(data) return anatomy_filled["render"]["path"].replace("\\", "/") @@ -1127,10 +1125,8 @@ def create_write_node( if knob["name"] == "file_type": representation = knob["value"] - host_name = os.environ.get("AVALON_APP") try: data.update({ - "app": host_name, "imageio_writes": imageio_writes, "representation": representation, }) From a2c61b5233c4d20917c1c4594c6923738dc6b362 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 26 Jul 2022 19:48:06 +0200 Subject: [PATCH 554/785] nuke: slate workflow switch to instance data --- openpype/hosts/nuke/plugins/publish/collect_slate_node.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 4257ed3131..bfe32d8fd1 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -33,6 +33,7 @@ class CollectSlate(pyblish.api.InstancePlugin): if slate_node: instance.data["slateNode"] = slate_node + instance.data["slate"] = True 
instance.data["families"].append("slate") instance.data["versionData"]["families"].append("slate") self.log.info( From 427c61f22c7b9bc68b1d6a64a238a4db762e7238 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 26 Jul 2022 19:49:00 +0200 Subject: [PATCH 555/785] nuke: fixing farm and local rendering slate workflow --- .../nuke/plugins/publish/extract_render_local.py | 7 +++++-- .../nuke/plugins/publish/extract_slate_frame.py | 8 ++++++++ .../plugins/publish/submit_nuke_deadline.py | 15 +++++---------- .../plugins/publish/submit_publish_job.py | 8 ++++++-- 4 files changed, 24 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 1b3bf46b71..7cc9b2f928 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -80,8 +80,11 @@ class NukeRenderLocal(openpype.api.Extractor): repre = { 'name': ext, 'ext': ext, - 'frameStart': "%0{}d".format( - len(str(last_frame))) % first_frame, + 'frameStart': ( + "{{:0>{}}}" + .format(len(str(last_frame))) + .format(first_frame) + ), 'files': filenames, "stagingDir": out_dir } diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index ccfaf0ed46..b5cad143db 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -237,6 +237,7 @@ class ExtractSlateFrame(openpype.api.Extractor): def _render_slate_to_sequence(self, instance): # set slate frame first_frame = instance.data["frameStartHandle"] + last_frame = instance.data["frameEndHandle"] slate_first_frame = first_frame - 1 # render slate as sequence frame @@ -285,6 +286,13 @@ class ExtractSlateFrame(openpype.api.Extractor): matching_repre["files"] = [first_filename, slate_filename] elif slate_filename not in matching_repre["files"]: matching_repre["files"].insert(0, slate_filename) + matching_repre["frameStart"] = ( + "{{:0>{}}}" + .format(len(str(last_frame))) + .format(slate_first_frame) + ) + self.log.debug( + "__ matching_repre: {}".format(pformat(matching_repre))) self.log.warning("Added slate frame to representation files") diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 93fb511a34..a5f8270ec7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -80,10 +80,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Using published scene for render {}".format(script_path) ) - # exception for slate workflow - if "slate" in instance.data["families"]: - submit_frame_start -= 1 - response = self.payload_submit( instance, script_path, @@ -99,10 +95,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["publishJobState"] = "Suspended" if instance.data.get("bakingNukeScripts"): - # exception for slate workflow - if "slate" in instance.data["families"]: - submit_frame_start += 1 - for baking_script in instance.data["bakingNukeScripts"]: render_path = baking_script["bakeRenderPath"] script_path = baking_script["bakeScriptPath"] @@ -365,7 +357,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): if not instance.data.get("expectedFiles"): instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) + dirname = 
os.path.dirname(path) file = os.path.basename(path) if "#" in file: @@ -377,9 +369,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["expectedFiles"].append(path) return + if instance.data.get("slate"): + start_frame -= 1 + for i in range(start_frame, (end_frame + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirname, (file % i)).replace("\\", "/")) def get_limit_groups(self): """Search for limit group nodes and return group name. diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 43ea64e565..f05ef31938 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -158,7 +158,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # mapping of instance properties to be transfered to new instance for every # specified family instance_transfer = { - "slate": ["slateFrames"], + "slate": ["slateFrames", "slate"], "review": ["lutPath"], "render2d": ["bakingNukeScripts", "version"], "renderlayer": ["convertToScanline"] @@ -585,11 +585,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): " This may cause issues on farm." ).format(staging)) + frame_start = int(instance.get("frameStartHandle")) + if instance.get("slate"): + frame_start -= 1 + rep = { "name": ext, "ext": ext, "files": [os.path.basename(f) for f in list(collection)], - "frameStart": int(instance.get("frameStartHandle")), + "frameStart": frame_start, "frameEnd": int(instance.get("frameEndHandle")), # If expectedFile are absolute, we need only filenames "stagingDir": staging, From c53b7bba7784aff067cfa4cfdeffe35be146180c Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Tue, 26 Jul 2022 21:09:54 +0300 Subject: [PATCH 556/785] Remove unnecessary unused function. 
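The slate handling above boils down to one rule: widen the expected frame range by a single frame at the head before expanding the padded render path into per-frame file names. A minimal sketch of that idea, using hypothetical helper and argument names rather than the plugin's own:

import os

def expand_expected_files(path, frame_start, frame_end, has_slate=False):
    # Single-file outputs (no frame token) need no per-frame expansion.
    dirname, filename = os.path.split(path)
    if "%" not in filename:
        return [path.replace("\\", "/")]
    # The slate is rendered one frame before the first content frame.
    if has_slate:
        frame_start -= 1
    return [
        os.path.join(dirname, filename % frame).replace("\\", "/")
        for frame in range(frame_start, frame_end + 1)
    ]

Keeping the offset in this one place is what lets the patch drop the earlier pattern of nudging the submitted start frame back and forth around the Deadline payload.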
--- openpype/hosts/maya/api/lib_rendersettings.py | 12 ------------ openpype/hosts/maya/plugins/create/create_render.py | 2 -- 2 files changed, 14 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index e5acdc2139..8c09175614 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -49,18 +49,6 @@ class RenderSettings(object): legacy_io.Session["AVALON_PROJECT"] ) - @staticmethod - def apply_defaults(renderer=None, project_settings=None): - if renderer is None: - renderer = cmds.getAttr( - 'defaultRenderGlobals.currentRenderer').lower() - # handle various renderman names - if renderer.startswith('renderman'): - renderer = 'renderman' - - render_settings = RenderSettings(project_settings) - render_settings.set_default_renderer_settings(renderer) - def set_default_renderer_settings(self, renderer=None): """Set basic settings based on renderer.""" if not renderer: diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index b73f550fa2..d4ad488b32 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -164,8 +164,6 @@ class CreateRender(plugin.Creator): collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') - self.log.info("Applying default render settings..") - lib_rendersettings.RenderSettings.apply_defaults() return self.instance def _deadline_webservice_changed(self): From 137ba908b51acf1a79963e71dc9278ec935f002a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 26 Jul 2022 22:08:23 +0200 Subject: [PATCH 557/785] nuke: code style improvements --- .../plugins/publish/extract_render_local.py | 2 +- .../plugins/publish/precollect_instances.py | 17 ++++++++++------- .../nuke/plugins/publish/precollect_writes.py | 6 ++++-- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 1595fe03fb..7e66cdccda 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -123,4 +123,4 @@ class NukeRenderLocal(openpype.api.Extractor): self.log.info('Finished render') - self.log.debug("instance extracted: {}".format(instance.data)) + self.log.debug("_ instance.data: {}".format(instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index b0da94c4ce..b396056eb9 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -50,7 +50,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # establish families family = avalon_knob_data["family"] families_ak = avalon_knob_data.get("families", []) - families = list() + families = [] # except disabled nodes but exclude backdrops in test if ("nukenodes" not in family) and (node["disable"].value()): @@ -111,10 +111,10 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): self.log.debug("__ families: `{}`".format(families)) # Get format - format = root['format'].value() - resolution_width = format.width() - resolution_height = format.height() - pixel_aspect = format.pixelAspect() + format_ = root['format'].value() + resolution_width = format_.width() + 
resolution_height = format_.height() + pixel_aspect = format_.pixelAspect() # get publish knob value if "publish" not in node.knobs(): @@ -125,8 +125,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): self.log.debug("__ _families_test: `{}`".format(_families_test)) for family_test in _families_test: if family_test in self.sync_workfile_version_on_families: - self.log.debug("Syncing version with workfile for '{}'" - .format(family_test)) + self.log.debug( + "Syncing version with workfile for '{}'".format( + family_test + ) + ) # get version to instance for integration instance.data['version'] = instance.context.data['version'] diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index a97f34b370..e37cc8a80a 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -144,8 +144,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.debug("colorspace: `{}`".format(colorspace)) version_data = { - "families": [f.replace(".local", "").replace(".farm", "") - for f in _families_test if "write" not in f], + "families": [ + _f.replace(".local", "").replace(".farm", "") + for _f in _families_test if "write" != _f + ], "colorspace": colorspace } From 951cc995a52057e163f5cda99b492faf225adb40 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 26 Jul 2022 22:09:06 +0200 Subject: [PATCH 558/785] nuke: fixing family after local render anatomyData family should be also changed --- openpype/hosts/nuke/plugins/publish/extract_render_local.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 7e66cdccda..6f0196690c 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -105,13 +105,16 @@ class NukeRenderLocal(openpype.api.Extractor): instance.data['family'] = 'render' families.remove('render.local') families.insert(0, "render2d") + instance.data["anatomyData"]["family"] = "render" elif "prerender.local" in families: instance.data['family'] = 'prerender' families.remove('prerender.local') families.insert(0, "prerender") + instance.data["anatomyData"]["family"] = "prerender" elif "still.local" in families: instance.data['family'] = 'image' families.remove('still.local') + instance.data["anatomyData"]["family"] = "image" instance.data["families"] = families collections, remainder = clique.assemble(filenames) From 955423c9eb379cd9f90662672d8b91f3c96bb85a Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 27 Jul 2022 04:03:52 +0000 Subject: [PATCH 559/785] [Automated] Bump version --- CHANGELOG.md | 38 ++++++++++++-------------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ec880b9c61..133be18f68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.12.2-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) @@ -11,21 +11,22 @@ **🚀 Enhancements** +- General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) - Maya: add additional validators to Settings 
[\#3540](https://github.com/pypeclub/OpenPype/pull/3540) - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) -- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) -- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) - Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) - TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) -- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) **🐛 Bug fixes** +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) +- Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) +- NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) @@ -44,13 +45,19 @@ **🔀 Refactored code** +- General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) +- General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) - General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) - General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) - Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) -- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) - Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) +**Merged pull requests:** + +- Maya: fix active pane loss [\#3566](https://github.com/pypeclub/OpenPype/pull/3566) + ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) @@ -59,10 +66,6 @@ - Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) -**🆕 New features** - -- Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) - **🚀 Enhancements** - TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) @@ -72,8 +75,6 @@ - General: Better arguments 
order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) - Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) - Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) -- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) -- Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) **🐛 Bug fixes** @@ -92,7 +93,6 @@ - Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) - Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) - LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) -- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) **🔀 Refactored code** @@ -111,20 +111,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) -**🚀 Enhancements** - -- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) - -**🐛 Bug fixes** - -- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) -- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) - -**🔀 Refactored code** - -- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) -- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) - ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) diff --git a/openpype/version.py b/openpype/version.py index 9dda1eacce..9388d4219e 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2-nightly.3" +__version__ = "3.12.2-nightly.4" diff --git a/pyproject.toml b/pyproject.toml index eebc8a5600..0a9c02834a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2-nightly.3" # OpenPype +version = "3.12.2-nightly.4" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 078bcb3b8e027948b8951920821e63392378c787 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 27 Jul 2022 07:58:18 +0000 Subject: [PATCH 560/785] [Automated] Release --- CHANGELOG.md | 5 ++--- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 133be18f68..e4fc1d59ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.12.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) ### 📖 Documentation @@ -17,7 +17,6 @@ - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) -- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) - Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) - TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) diff --git a/openpype/version.py b/openpype/version.py index 9388d4219e..5c39e9e630 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2-nightly.4" +__version__ = "3.12.2" diff --git a/pyproject.toml b/pyproject.toml index 0a9c02834a..175e72be24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2-nightly.4" # OpenPype +version = "3.12.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 361ba53f26d89e94758ff8f32e48444ba1715771 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:54:57 +0200 Subject: [PATCH 561/785] use new location of 'get_default_components' function --- start.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/start.py b/start.py index ace33ab92a..08e0849303 100644 --- a/start.py +++ b/start.py @@ -1113,7 +1113,7 @@ def boot(): def get_info(use_staging=None) -> list: """Print additional information to console.""" - from openpype.lib.mongo import get_default_components + from openpype.client.mongo import get_default_components from openpype.lib.log import PypeLogger components = get_default_components() From bfbb1225d0ed7a7acccf900e42bdccad60a05ced Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:57:19 +0200 Subject: [PATCH 562/785] Use 'Logger' instead of 'PypeLogger' --- start.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/start.py b/start.py index 08e0849303..e83589d160 100644 --- a/start.py +++ b/start.py @@ -1114,7 +1114,11 @@ def boot(): def get_info(use_staging=None) -> list: """Print additional information to console.""" from openpype.client.mongo import get_default_components - from openpype.lib.log import PypeLogger + try: + from openpype.lib.log import Logger + except ImportError: + # Backwards compatibility for 'PypeLogger' + from openpype.lib.log import PypeLogger as Logger components = get_default_components() @@ -1141,14 +1145,14 @@ def get_info(use_staging=None) -> list: os.environ.get("MUSTER_REST_URL"))) # Reinitialize - PypeLogger.initialize() + Logger.initialize() mongo_components = get_default_components() if mongo_components["host"]: inf.append(("Logging to MongoDB", mongo_components["host"])) inf.append((" - port", mongo_components["port"] or "")) - inf.append((" - database", PypeLogger.log_database_name)) - inf.append((" - collection", PypeLogger.log_collection_name)) + inf.append((" - database", Logger.log_database_name)) + inf.append((" - collection", Logger.log_collection_name)) inf.append((" - user", mongo_components["username"] or "")) if mongo_components["auth_db"]: inf.append((" - auth source", mongo_components["auth_db"])) From f7cb4cd83a4fc107b2960903ee8b87fc28c0052c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 11:01:54 +0200 Subject: [PATCH 563/785] added missing default settings --- .../settings/defaults/system_settings/modules.json | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 9d8910689a..3ed41c7a49 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -131,16 +131,17 @@ } } }, + "kitsu": { + "enabled": false, + "server": "" + }, "shotgrid": { "enabled": false, "leecher_manager_url": "http://127.0.0.1:3000", "leecher_backend_url": "http://127.0.0.1:8090", + "filter_projects_by_login": true, "shotgrid_settings": {} }, - "kitsu": { - "enabled": false, - "server": "" - }, "timers_manager": { "enabled": true, "auto_stop": true, @@ -209,4 +210,4 @@ "linux": "" } } -} +} \ No newline at end of file From a3a839181b0fa94d5696a53c8a4d52cc8aed4119 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 11:21:20 +0200 Subject: [PATCH 564/785] global, flame, hiero, resolve, sp: implementing `newAssetPublishing` --- 
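The logger change a few commits up is worth calling out: the bootstrap now prefers the new Logger name and only falls back to the legacy alias, so it keeps working against older openpype.lib layouts. A condensed sketch of that guard, reduced to the part the patch actually relies on:

try:
    from openpype.lib.log import Logger
except ImportError:
    # Older releases only expose the logger class as 'PypeLogger'.
    from openpype.lib.log import PypeLogger as Logger

Logger.initialize()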
.../plugins/publish/collect_timeline_instances.py | 3 ++- .../hiero/plugins/publish/precollect_instances.py | 3 ++- .../resolve/plugins/publish/precollect_instances.py | 3 ++- .../plugins/publish/collect_editorial_instances.py | 3 ++- openpype/plugins/publish/integrate.py | 11 ++++++++++- openpype/plugins/publish/validate_asset_docs.py | 4 ++++ 6 files changed, 22 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 5db89a0ab9..992db62c75 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -136,7 +136,8 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "tasks": { task["name"]: {"type": task["type"]} for task in self.add_tasks}, - "representations": [] + "representations": [], + "newAssetPublishing": True }) self.log.debug("__ inst_data: {}".format(pformat(inst_data))) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 2d0ec6fc99..0c7dbc1f22 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -109,7 +109,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "clipAnnotations": annotations, # add all additional tags - "tags": phiero.get_track_item_tags(track_item) + "tags": phiero.get_track_item_tags(track_item), + "newAssetPublishing": True }) # otio clip data diff --git a/openpype/hosts/resolve/plugins/publish/precollect_instances.py b/openpype/hosts/resolve/plugins/publish/precollect_instances.py index 8f1a13a4e5..ee51998c0d 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_instances.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_instances.py @@ -70,7 +70,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "publish": resolve.get_publish_attribute(timeline_item), "fps": context.data["fps"], "handleStart": handle_start, - "handleEnd": handle_end + "handleEnd": handle_end, + "newAssetPublishing": True }) # otio clip data diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py index 3237fbbe12..75c260bad7 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py @@ -170,7 +170,8 @@ class CollectInstances(pyblish.api.InstancePlugin): "frameStart": frame_start, "frameEnd": frame_end, "frameStartH": frame_start - handle_start, - "frameEndH": frame_end + handle_end + "frameEndH": frame_end + handle_end, + "newAssetPublishing": True } for data_key in instance_data_filter: diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8ab508adc9..a4378bf58d 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -12,6 +12,7 @@ import pyblish.api import openpype.api from openpype.client import ( get_representations, + get_asset_by_name, get_subset_by_name, get_version_by_name, ) @@ -273,6 +274,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def register(self, instance, file_transactions, filtered_repres): project_name = legacy_io.active_project() + # making sure editorial instances have its `assetEntity` + if 
instance.data.get("newAssetPublishing"): + asset_doc = get_asset_by_name( + project_name, + instance.data["asset"] + ) + instance.data["assetEntity"] = asset_doc + instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: self.log.info(( @@ -426,7 +435,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "".format(len(prepared_representations))) def prepare_subset(self, instance, project_name): - asset_doc = instance.data.get("assetEntity") + asset_doc = instance.data["assetEntity"] subset_name = instance.data["subset"] self.log.debug("Subset: {}".format(subset_name)) diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index bc1f9b9e6c..9a1ca5b8de 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,6 +24,10 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") + elif instance.data.get("newAssetPublishing"): + # skip if it is editorial + self.log.info("Editorial instance is no need to check...") + else: raise PublishValidationError(( "Instance \"{}\" doesn't have asset document " From e8a8f86cdf387e777914ae833ea7f469bc63b11c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 12:32:09 +0200 Subject: [PATCH 565/785] global: removing changes from integrate --- openpype/plugins/publish/integrate.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index a4378bf58d..74227fdb40 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -274,14 +274,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def register(self, instance, file_transactions, filtered_repres): project_name = legacy_io.active_project() - # making sure editorial instances have its `assetEntity` - if instance.data.get("newAssetPublishing"): - asset_doc = get_asset_by_name( - project_name, - instance.data["asset"] - ) - instance.data["assetEntity"] = asset_doc - instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: self.log.info(( From fac4529e4df877bdf5f774907430f9b5662636eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 12:32:44 +0200 Subject: [PATCH 566/785] global: integrate hierarchy is fixing avalonData and avalonEntity --- .../publish/extract_hierarchy_avalon.py | 48 +++++++++++++++---- 1 file changed, 40 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 8d447ba595..967381b02e 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -30,9 +30,15 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) self.project = None - self.import_to_avalon(project_name, hierarchy_context) + self.import_to_avalon(context, project_name, hierarchy_context) - def import_to_avalon(self, project_name, input_data, parent=None): + def import_to_avalon( + self, + context, + project_name, + input_data, + parent=None, + ): for name in input_data: self.log.info("input_data[name]: {}".format(input_data[name])) entity_data = input_data[name] @@ -133,6 +139,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): # Unarchive if entity was archived entity = 
self.unarchive_entity(unarchive_entity, data) + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances(context, entity) + if update_data: # Update entity data with input data legacy_io.update_many( @@ -142,7 +151,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if "childs" in entity_data: self.import_to_avalon( - project_name, entity_data["childs"], entity + context, project_name, entity_data["childs"], entity ) def unarchive_entity(self, entity, data): @@ -159,20 +168,43 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): {"_id": entity["_id"]}, new_entity ) + return new_entity - def create_avalon_asset(self, project_name, name, data): - item = { + def create_avalon_asset(self, name, data): + asset_doc = { "schema": "openpype:asset-3.0", "name": name, "parent": self.project["_id"], "type": "asset", "data": data } - self.log.debug("Creating asset: {}".format(item)) - entity_id = legacy_io.insert_one(item).inserted_id + self.log.debug("Creating asset: {}".format(asset_doc)) + asset_doc["_id"] = legacy_io.insert_one(asset_doc).inserted_id - return get_asset_by_id(project_name, entity_id) + return asset_doc + + def _set_avalon_data_to_relative_instances(self, context, asset_doc): + for instance in context: + asset_name = asset_doc["name"] + inst_asset_name = instance.data["asset"] + + if asset_name == inst_asset_name: + instance.data["assetEntity"] = asset_doc + + # get parenting data + parents = asset_doc["data"].get("parents") or list() + + # equire only relative parent + if parents: + parent_name = parents[-1] + + # update avalon data on instance + instance.data["avalonData"].update({ + "hierarchy": "/".join(parents), + "task": {}, + "parent": parent_name + }) def _get_active_assets(self, context): """ Returns only asset dictionary. 
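Once the hierarchy extractor creates or unarchives an asset, every instance publishing into that asset gets its anatomy data patched from the document's parents. The mapping is small enough to read as a pure function; a rough sketch under assumed document shapes, not the extractor itself:

def hierarchy_fields(asset_doc, project_name):
    """Derive the anatomy keys updated on instances matching the asset."""
    parents = asset_doc["data"].get("parents") or []
    parent_name = parents[-1] if parents else project_name
    return {
        "hierarchy": "/".join(parents),
        "parent": parent_name,
        "task": {},
    }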
From 9b14e486579e209f2ff100842c081fc938406c8c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 12:38:14 +0200 Subject: [PATCH 567/785] fixing avalonData to anatomyData --- openpype/plugins/publish/extract_hierarchy_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 967381b02e..01dc80d6ee 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -200,7 +200,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): parent_name = parents[-1] # update avalon data on instance - instance.data["avalonData"].update({ + instance.data["anatomyData"].update({ "hierarchy": "/".join(parents), "task": {}, "parent": parent_name From 5af77fe04caf1b38313ce09b182aa4f3eea2946f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 27 Jul 2022 12:59:41 +0200 Subject: [PATCH 568/785] Update openpype/plugins/publish/extract_hierarchy_avalon.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/extract_hierarchy_avalon.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 01dc80d6ee..37ca42e4cc 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -186,6 +186,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): def _set_avalon_data_to_relative_instances(self, context, asset_doc): for instance in context: + # Skip instance if has filled asset entity + if instance.data.get("assetEntity"): + continue asset_name = asset_doc["name"] inst_asset_name = instance.data["asset"] From e9e00831f03d69776a380d826e6a971e44855bf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 27 Jul 2022 13:00:14 +0200 Subject: [PATCH 569/785] Update openpype/plugins/publish/extract_hierarchy_avalon.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/extract_hierarchy_avalon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 37ca42e4cc..ec01ab4e8f 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -199,6 +199,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): parents = asset_doc["data"].get("parents") or list() # equire only relative parent + parent_name = project_name if parents: parent_name = parents[-1] From 203048bcf814a5ab8e05f769ce19d52fd19937db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 27 Jul 2022 13:00:21 +0200 Subject: [PATCH 570/785] Update openpype/plugins/publish/integrate.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/integrate.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 74227fdb40..cac212b7e2 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -12,7 +12,6 @@ import pyblish.api import openpype.api from openpype.client import ( get_representations, - get_asset_by_name, get_subset_by_name, get_version_by_name, ) From 
86d9d0134ad57ebb1a07cdf3dd6d6ef13d466d0d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 13:02:45 +0200 Subject: [PATCH 571/785] fixing missing project_name --- .../plugins/publish/extract_hierarchy_avalon.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index ec01ab4e8f..d765755eee 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -140,7 +140,11 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): entity = self.unarchive_entity(unarchive_entity, data) # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances(context, entity) + self._set_avalon_data_to_relative_instances( + context, + project_name, + entity + ) if update_data: # Update entity data with input data @@ -184,7 +188,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): return asset_doc - def _set_avalon_data_to_relative_instances(self, context, asset_doc): + def _set_avalon_data_to_relative_instances( + self, + context, + project_name, + asset_doc + ): for instance in context: # Skip instance if has filled asset entity if instance.data.get("assetEntity"): From a0149c36ffd80d1dcc5a2b08c5c09d37062de621 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Jul 2022 13:14:35 +0200 Subject: [PATCH 572/785] fixing problem with more function argumets --- openpype/plugins/publish/extract_hierarchy_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index d765755eee..6b4e5f48c5 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -133,7 +133,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if unarchive_entity is None: # Create entity if doesn"t exist entity = self.create_avalon_asset( - project_name, name, data + name, data ) else: # Unarchive if entity was archived From 2e0fe9335151c6b7cdc9d25011216ca3b2705f5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:16:46 +0200 Subject: [PATCH 573/785] use KnownPublishError instead of assertions --- openpype/plugins/publish/integrate.py | 42 ++++++++++++++++----------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8ab508adc9..e87538a5a4 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -517,14 +517,16 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # pre-flight validations if repre["ext"].startswith("."): - raise ValueError("Extension must not start with a dot '.': " - "{}".format(repre["ext"])) + raise KnownPublishError(( + "Extension must not start with a dot '.': {}" + ).format(repre["ext"])) if repre.get("transfers"): - raise ValueError("Representation is not allowed to have transfers" - "data before integration. They are computed in " - "the integrator" - "Got: {}".format(repre["transfers"])) + raise KnownPublishError(( + "Representation is not allowed to have transfers" + "data before integration. They are computed in " + "the integrator. 
Got: {}" + ).format(repre["transfers"])) # create template data for Anatomy template_data = copy.deepcopy(instance.data["anatomyData"]) @@ -563,8 +565,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "{}".format(instance_stagingdir)) stagingdir = instance_stagingdir if not stagingdir: - raise ValueError("No staging directory set for representation: " - "{}".format(repre)) + raise KnownPublishError( + "No staging directory set for representation: {}".format(repre) + ) self.log.debug("Anatomy template name: {}".format(template_name)) anatomy = instance.context.data['anatomy'] @@ -574,9 +577,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: # Collection of files (sequence) - assert not any(os.path.isabs(fname) for fname in files), ( - "Given file names contain full paths" - ) + if any(os.path.isabs(fname) for fname in files): + raise KnownPublishError("Given file names contain full paths") src_collection = assemble(files) @@ -632,9 +634,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): dst_collection.indexes.clear() dst_collection.indexes.update(set(destination_indexes)) dst_collection.padding = destination_padding - assert ( - len(src_collection.indexes) == len(dst_collection.indexes) - ), "This is a bug" + if len(src_collection.indexes) != len(dst_collection.indexes): + raise KnownPublishError(( + "This is a bug. Source sequence frames length" + " does not match integration frames length" + )) # Multiple file transfers transfers = [] @@ -645,9 +649,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): else: # Single file fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) + if os.path.isabs(fname): + self.log.error( + "Filename in representation is filepath {}".format(fname) + ) + raise KnownPublishError( + "This is a bug. 
Representation file name is full path" + ) # Manage anatomy template data template_data.pop("frame", None) From 1bb9b27c7ff5a8c7d0a8fb4c1e631e5e6d33be1d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:17:07 +0200 Subject: [PATCH 574/785] simplified staging dir resolving --- openpype/plugins/publish/integrate.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index e87538a5a4..fdf5b21a6b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -556,14 +556,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin): continue template_data[anatomy_key] = value - if repre.get('stagingDir'): - stagingdir = repre['stagingDir'] - else: + stagingdir = repre.get("stagingDir") + if not stagingdir: # Fall back to instance staging dir if not explicitly # set for representation in the instance - self.log.debug("Representation uses instance staging dir: " - "{}".format(instance_stagingdir)) + self.log.debug(( + "Representation uses instance staging dir: {}" + ).format(instance_stagingdir)) stagingdir = instance_stagingdir + if not stagingdir: raise KnownPublishError( "No staging directory set for representation: {}".format(repre) From 89d49533e4f15b3e055be9d01250780abb1bc199 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:17:56 +0200 Subject: [PATCH 575/785] add the values only if they are not 'None' --- openpype/plugins/publish/integrate.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index fdf5b21a6b..87058dd2da 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -686,9 +686,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Also add these values to the context even if not used by the # destination template value = template_data.get(key) - if not value: - continue - repre_context[key] = template_data[key] + if value is not None: + repre_context[key] = value # Explicitly store the full list even though template data might # have a different value because it uses just a single udim tile From 5272907504aa4b6e825d715dd7b9c1714f6fb85b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:18:34 +0200 Subject: [PATCH 576/785] import source_hash directly --- openpype/plugins/publish/integrate.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 87058dd2da..a5f5a66091 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -9,12 +9,12 @@ from bson.objectid import ObjectId from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api -import openpype.api from openpype.client import ( get_representations, get_subset_by_name, get_version_by_name, ) +from openype.lib import source_hash from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io @@ -834,6 +834,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_profile_filter_criteria(self, instance): """Return filter criteria for `filter_profiles`""" + # Anatomy data is pre-filled by Collectors anatomy_data = instance.data["anatomyData"] @@ -864,6 +865,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): path: modified path if 
possible, or unmodified path + warning logged """ + success, rootless_path = anatomy.find_root_template_from_path(path) if success: path = rootless_path @@ -885,6 +887,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): output_resources: array of dictionaries to be added to 'files' key in representation """ + file_infos = [] for file_path in destinations: file_info = self.prepare_file_info(file_path, anatomy, sites=sites) @@ -904,10 +907,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): Returns: dict: file info dictionary """ + return { "_id": ObjectId(), "path": self.get_rootless_path(anatomy, path), "size": os.path.getsize(path), - "hash": openpype.api.source_hash(path), + "hash": source_hash(path), "sites": sites } From 0c061c50276ac68ead8b7d3918b007e65ab543e8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:26:38 +0200 Subject: [PATCH 577/785] added "output" to representation context keys to auto fill it to context --- openpype/plugins/publish/integrate.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index a5f5a66091..52a5ea2bfc 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -168,7 +168,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the database even if not used by the destination template db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "username" + "family", "hierarchy", "username", "output" ] skip_host_families = [] @@ -727,11 +727,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "context": repre_context } - # todo: simplify/streamline which additional data makes its way into - # the representation context - if repre.get("outputName"): - representation["context"]["output"] = repre['outputName'] - if is_sequence_representation and repre.get("frameStart") is not None: representation['context']['frame'] = template_data["frame"] From 9875f68cf43fef06e4670c6a5c61f3b3d5c0dbb0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:27:13 +0200 Subject: [PATCH 578/785] don't just check existence of key but also it's value when traversing repre and instance data --- openpype/plugins/publish/integrate.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 52a5ea2bfc..f89e7b33ce 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -548,13 +548,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): }.items(): # Allow to take value from representation # if not found also consider instance.data - if key in repre: - value = repre[key] - elif key in instance.data: - value = instance.data[key] - else: - continue - template_data[anatomy_key] = value + value = repre.get(key) + if value is None: + value = instance.data.get(key) + + if value is not None: + template_data[anatomy_key] = value stagingdir = repre.get("stagingDir") if not stagingdir: From 0be6d5b55c0266241d7960a9a33056762cf788c2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:29:24 +0200 Subject: [PATCH 579/785] removed backwards compatibility comments which as it's not backwards compatibility --- openpype/plugins/publish/integrate.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate.py 
b/openpype/plugins/publish/integrate.py index f89e7b33ce..7dfd8e4cac 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -700,14 +700,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): else: repre_id = ObjectId() - # Backwards compatibility: # Store first transferred destination as published path data - # todo: can we remove this? - # todo: We shouldn't change data that makes its way back into - # instance.data[] until we know the publish actually succeeded - # otherwise `published_path` might not actually be valid? + # - used primarily for reviews that are integrated to custom modules + # TODO we should probably store all integrated files + # related to the representation? published_path = transfers[0][1] - repre["published_path"] = published_path # Backwards compatibility + repre["published_path"] = published_path # todo: `repre` is not the actual `representation` entity # we should simplify/clarify difference between data above From 12af64dbc0ed7eb6b415d55bc472c81c917eff7b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:30:34 +0200 Subject: [PATCH 580/785] use last frame instead of first frame for padding and don't look at source collection padding --- openpype/plugins/publish/integrate.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 7dfd8e4cac..3a86f4b373 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -78,12 +78,6 @@ def get_frame_padded(frame, padding): return "{frame:0{padding}d}".format(padding=padding, frame=frame) -def get_first_frame_padded(collection): - """Return first frame as padded number from `clique.Collection`""" - start_frame = next(iter(collection.indexes)) - return get_frame_padded(start_frame, padding=collection.padding) - - class IntegrateAsset(pyblish.api.InstancePlugin): """Register publish in the database and transfer files to destinations. @@ -588,7 +582,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # differs from the collection we want to shift the destination # frame indices from the source collection. 
destination_indexes = list(src_collection.indexes) - destination_padding = len(get_first_frame_padded(src_collection)) + # Use last frame for minimum padding + # - that should cover both 'udim' and 'frame' minimum padding + destination_padding = len(str(destination_indexes[-1])) if repre.get("frameStart") is not None and not is_udim: index_frame_start = int(repre.get("frameStart")) From 6cab5917c4903df529429ad5e5bf209409426708 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:36:23 +0200 Subject: [PATCH 581/785] use template padding for frames if padding is bigger then minimum collection's padding --- openpype/plugins/publish/integrate.py | 39 +++++++++++++-------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 3a86f4b373..7a9cee593b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -565,7 +565,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Anatomy template name: {}".format(template_name)) anatomy = instance.context.data['anatomy'] - template = os.path.normpath(anatomy.templates[template_name]["path"]) + publish_template_category = anatomy.templates[template_name] + template = os.path.normpath(publish_template_category["path"]) is_udim = bool(repre.get("udim")) is_sequence_representation = isinstance(files, (list, tuple)) @@ -585,27 +586,25 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Use last frame for minimum padding # - that should cover both 'udim' and 'frame' minimum padding destination_padding = len(str(destination_indexes[-1])) - if repre.get("frameStart") is not None and not is_udim: - index_frame_start = int(repre.get("frameStart")) - - render_template = anatomy.templates[template_name] - # todo: should we ALWAYS manage the frame padding even when not - # having `frameStart` set? - frame_start_padding = int( - render_template.get( - "frame_padding", - render_template.get("padding") - ) + if not is_udim: + # Change padding for frames if template has defined higher + # padding. + template_padding = int( + publish_template_category["frame_padding"] ) + if template_padding > destination_padding: + destination_padding = template_padding - # Shift destination sequence to the start frame - src_start_frame = next(iter(src_collection.indexes)) - shift = index_frame_start - src_start_frame - if shift: - destination_indexes = [ - frame + shift for frame in destination_indexes - ] - destination_padding = frame_start_padding + if repre.get("frameStart") is not None: + index_frame_start = int(repre.get("frameStart")) + + # Shift destination sequence to the start frame + src_start_frame = next(iter(src_collection.indexes)) + shift = index_frame_start - src_start_frame + if shift: + destination_indexes = [ + frame + shift for frame in destination_indexes + ] # To construct the destination template with anatomy we require # a Frame or UDIM tile set for the template data. 
We use the first From 3835695376ff87983124a9ac802b5ecffa5e0344 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:38:51 +0200 Subject: [PATCH 582/785] simplified recalculation of destination indexes --- openpype/plugins/publish/integrate.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 7a9cee593b..0387196a8a 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -577,11 +577,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): src_collection = assemble(files) - # If the representation has `frameStart` set it renumbers the - # frame indices of the published collection. It will start from - # that `frameStart` index instead. Thus if that frame start - # differs from the collection we want to shift the destination - # frame indices from the source collection. destination_indexes = list(src_collection.indexes) # Use last frame for minimum padding # - that should cover both 'udim' and 'frame' minimum padding @@ -595,16 +590,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if template_padding > destination_padding: destination_padding = template_padding - if repre.get("frameStart") is not None: - index_frame_start = int(repre.get("frameStart")) - + # If the representation has `frameStart` set it renumbers the + # frame indices of the published collection. It will start from + # that `frameStart` index instead. Thus if that frame start + # differs from the collection we want to shift the destination + # frame indices from the source collection. + repre_frame_start = repre.get("frameStart") + if repre_frame_start is not None: + index_frame_start = int(repre["frameStart"]) # Shift destination sequence to the start frame - src_start_frame = next(iter(src_collection.indexes)) - shift = index_frame_start - src_start_frame - if shift: - destination_indexes = [ - frame + shift for frame in destination_indexes - ] + destination_indexes = [ + index_frame_start + idx + for idx in range(len(destination_indexes)) + ] # To construct the destination template with anatomy we require # a Frame or UDIM tile set for the template data. 
We use the first From 879df0a3a79121a2fe9472e89e99537fc24f2040 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:51:16 +0200 Subject: [PATCH 583/785] unify quotations --- openpype/plugins/publish/integrate.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0387196a8a..81a2190a21 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -526,7 +526,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data = copy.deepcopy(instance.data["anatomyData"]) # required representation keys - files = repre['files'] + files = repre["files"] template_data["representation"] = repre["name"] template_data["ext"] = repre["ext"] @@ -564,11 +564,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ) self.log.debug("Anatomy template name: {}".format(template_name)) - anatomy = instance.context.data['anatomy'] + anatomy = instance.context.data["anatomy"] publish_template_category = anatomy.templates[template_name] template = os.path.normpath(publish_template_category["path"]) is_udim = bool(repre.get("udim")) + is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: # Collection of files (sequence) @@ -704,13 +705,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # we should simplify/clarify difference between data above # and the actual representation entity for the database data = repre.get("data", {}) - data.update({'path': published_path, 'template': template}) + data.update({"path": published_path, "template": template}) representation = { "_id": repre_id, "schema": "openpype:representation-2.0", "type": "representation", "parent": version["_id"], - "name": repre['name'], + "name": repre["name"], "data": data, # Imprint shortcut to context for performance reasons. 
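The padding and renumbering rules adjusted in these hunks reduce to a small pure step: shift the source indexes to the representation's frameStart when one is given, then pad to whichever is larger, the template's frame padding or the widest destination frame. A standalone sketch with assumed names, not the integrator code:

def destination_frame_names(head, tail, src_frames,
                            frame_start=None, template_padding=4):
    """Return padded destination file names for a published sequence."""
    frames = list(src_frames)
    if frame_start is not None:
        # Renumber so the published sequence starts at the requested frame.
        frames = [frame_start + i for i in range(len(frames))]
    padding = max(template_padding, len(str(frames[-1])))
    return ["{}{:0{}d}{}".format(head, frame, padding, tail) for frame in frames]

# e.g. destination_frame_names("shot.", ".exr", [993, 994, 995], frame_start=1001)
# -> ['shot.1001.exr', 'shot.1002.exr', 'shot.1003.exr']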
@@ -718,7 +719,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): } if is_sequence_representation and repre.get("frameStart") is not None: - representation['context']['frame'] = template_data["frame"] + representation["context"]["frame"] = template_data["frame"] return { "representation": representation, @@ -779,7 +780,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): version_data[key] = instance.data[key] # Include instance.data[versionData] directly - version_data_instance = instance.data.get('versionData') + version_data_instance = instance.data.get("versionData") if version_data_instance: version_data.update(version_data_instance) From 74ad4a558d9574f85cfe852576b6fdc2d40641ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:51:24 +0200 Subject: [PATCH 584/785] fix typo in import --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 81a2190a21..db55a17e59 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -14,7 +14,7 @@ from openpype.client import ( get_subset_by_name, get_version_by_name, ) -from openype.lib import source_hash +from openpype.lib import source_hash from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io From b5cdebe0707c9e4a9acccd16b6db92108ba8cca8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 13:56:39 +0200 Subject: [PATCH 585/785] make sure frame is filled directly in sequence condition --- openpype/plugins/publish/integrate.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index db55a17e59..c106649f2a 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -621,6 +621,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled[template_name]["path"] repre_context = template_filled.used_values + + # Make sure context contains frame + # NOTE: Frame would not be available only if template does not + # contain '{frame}' in template -> Do we want support it? 
+ if not is_udim: + repre_context["frame"] = first_index_padded + self.log.debug("Template filled: {}".format(str(template_filled))) dst_collection = assemble([os.path.normpath(template_filled)]) @@ -718,9 +725,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "context": repre_context } - if is_sequence_representation and repre.get("frameStart") is not None: - representation["context"]["frame"] = template_data["frame"] - return { "representation": representation, "anatomy_data": template_data, From b0571153785b1bf6626738e8bf4f29c54c74c38d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 27 Jul 2022 20:01:36 +0800 Subject: [PATCH 586/785] add write-color-sets option in point cache --- .../maya/plugins/create/create_pointcache.py | 5 +++- .../defaults/project_settings/maya.json | 1 + .../schemas/schema_maya_create.json | 30 ++++++++++++++++--- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index e876015adb..0d71f2995d 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -12,13 +12,16 @@ class CreatePointCache(plugin.Creator): family = "pointcache" icon = "gears" + write_color_sets = False + + def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) # Add animation data self.data.update(lib.collect_animation_data()) - self.data["writeColorSets"] = False # Vertex colors with the geometry. + self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. self.data["writeFaceSets"] = False # Vertex colors with the geometry. self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 70bedf55d8..d8b107b709 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -136,6 +136,7 @@ }, "CreatePointCache": { "enabled": true, + "write_color_sets": false, "defaults": [ "Main" ] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 9000b0246f..e0684597f5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -168,6 +168,32 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreatePointCache", + "label": "Create Cache", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, + { "type": "schema_template", "name": "template_create_plugin", @@ -208,10 +234,6 @@ "key": "CreateModel", "label": "Create Model" }, - { - "key": "CreatePointCache", - "label": "Create Cache" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" From 968151f3433ceed9fdf7ad9c793543ca493c26d8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 27 Jul 2022 21:56:36 +0800 Subject: [PATCH 587/785] fix the name of Point Cache in the Project Setting --- 
openpype/hosts/maya/plugins/create/create_animation.py | 3 ++- openpype/hosts/maya/plugins/create/create_pointcache.py | 8 ++++---- .../projects_schema/schemas/schema_maya_create.json | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index b7f473acef..7fc9c1e63e 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -11,7 +11,8 @@ class CreateAnimation(plugin.Creator): label = "Animation" family = "animation" icon = "male" - write_color_sets = False + + write_color_sets = False def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 0d71f2995d..0da781dfa0 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -11,9 +11,8 @@ class CreatePointCache(plugin.Creator): label = "Point Cache" family = "pointcache" icon = "gears" - - write_color_sets = False - + + write_color_sets = False def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) @@ -21,7 +20,8 @@ class CreatePointCache(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. + # Vertex colors with the geometry. + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Vertex colors with the geometry. self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index e0684597f5..2e4d8edef1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -172,7 +172,7 @@ "type": "dict", "collapsible": true, "key": "CreatePointCache", - "label": "Create Cache", + "label": "Create Point Cache", "checkbox_key": "enabled", "children": [ { From 6568e9cc605a39264077d6158baa76bf50d454f9 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 27 Jul 2022 22:03:41 +0800 Subject: [PATCH 588/785] fix the name of Point Cache in Settings --- openpype/hosts/maya/plugins/create/create_animation.py | 2 +- openpype/hosts/maya/plugins/create/create_pointcache.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 7fc9c1e63e..31d4f968d1 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -11,7 +11,7 @@ class CreateAnimation(plugin.Creator): label = "Animation" family = "animation" icon = "male" - + write_color_sets = False def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 0da781dfa0..1c83a9c20d 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -21,7 
+21,7 @@ class CreatePointCache(plugin.Creator): self.data.update(lib.collect_animation_data()) # Vertex colors with the geometry. - self.data["writeColorSets"] = self.write_color_sets + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Vertex colors with the geometry. self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible From 71a927d06ff6f6f407169aaffa2f79edb9b74199 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 27 Jul 2022 22:08:04 +0800 Subject: [PATCH 589/785] add write color sets to Settings and rename Create Cache to Create Point Cache in Settings --- openpype/hosts/maya/plugins/create/create_animation.py | 1 - openpype/hosts/maya/plugins/create/create_pointcache.py | 1 - 2 files changed, 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 31d4f968d1..e47d4e5b5a 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -11,7 +11,6 @@ class CreateAnimation(plugin.Creator): label = "Animation" family = "animation" icon = "male" - write_color_sets = False def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 1c83a9c20d..5516445de8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -11,7 +11,6 @@ class CreatePointCache(plugin.Creator): label = "Point Cache" family = "pointcache" icon = "gears" - write_color_sets = False def __init__(self, *args, **kwargs): From 4379dc019e4069ca44240aec565c1d136879f1a8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 27 Jul 2022 18:05:32 +0200 Subject: [PATCH 590/785] OP-3283 - implemented proper usage of {layer} in subset template for legacy creator The {layer} placeholder can be used in project_settings/global/tools/creator/subset_name_profiles to drive lower/upper casing when the layer name is used in the subset name (e.g. when multiple subsets are created at once). Warning: {layer} keeps the layer name as it is, it does not lowercase it! 
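For illustration, a minimal, self-contained sketch of how such a subset name template is expected to resolve. The make_layer_fill helper and the template strings are hypothetical stand-ins that only approximate the casing variants openpype.lib.prepare_template_data supplies; they are not the project's actual implementation.

# Hypothetical stand-in for the casing variants prepare_template_data
# is assumed to produce for a "layer" fill key.
def make_layer_fill(layer_name):
    return {
        "layer": layer_name,               # kept as-is, NOT lowercased
        "Layer": layer_name.capitalize(),  # capitalized variant
        "LAYER": layer_name.upper(),       # upper-case variant
    }

# Assumed subset_name_profiles templates, for demonstration only.
print("image{Layer}".format(**make_layer_fill("BG_hills")))  # imageBg_hills
print("image{LAYER}".format(**make_layer_fill("BG_hills")))  # imageBG_HILLS
print("image{layer}".format(**make_layer_fill("BG_hills")))  # imageBG_hills

The last line shows the behaviour the warning above refers to: the lowercase {layer} key does not normalize the layer name.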
--- .../plugins/create/create_legacy_image.py | 49 ++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 9736471a26..142cddfd52 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,6 +1,11 @@ from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import prepare_template_data +from openpype.lib.profiles_filtering import filter_profiles class CreateImage(create.LegacyCreator): @@ -82,7 +87,18 @@ class CreateImage(create.LegacyCreator): subset_name = creator_subset_name if len(groups) > 1: - subset_name += group.name.title().replace(" ", "") + subset_template = self._get_subset_template(self.family) + if not subset_template or 'layer' not in subset_template.lower(): + subset_name += group.name.title().replace(" ", "") + else: + fill_pairs = { + "variant": self.data["variant"], + "family": self.family, + "task": legacy_io.Session["AVALON_TASK"], + "layer": group.name + } + + subset_name = subset_template.format(**prepare_template_data(fill_pairs)) if group.long_name: for directory in group.long_name[::-1]: @@ -98,3 +114,34 @@ class CreateImage(create.LegacyCreator): # reusing existing group, need to rename afterwards if not create_group: stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + + @classmethod + def get_dynamic_data( + cls, variant, task_name, asset_id, project_name, host_name + ): + return {"layer": ""} + + def _get_subset_template(self, family): + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.tasks"] + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + tools_settings = get_project_settings(project_name)["global"]["tools"] + profiles = tools_settings["creator"]["subset_name_profiles"] + filtering_criteria = { + "families": family, + "hosts": "photoshop", + "tasks": task_name, + "task_types": task_type + } + + matching_profile = filter_profiles(profiles, filtering_criteria) + if matching_profile: + return matching_profile["template"] From 4c849e8d86e7665cc4ee3e235403f2baf41e8b84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 27 Jul 2022 18:14:22 +0200 Subject: [PATCH 591/785] :bug: fix environment resolution this will fix environment resolution of general settings in one pass --- start.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/start.py b/start.py index e83589d160..cbf8ffd178 100644 --- a/start.py +++ b/start.py @@ -270,8 +270,11 @@ def set_openpype_global_environments() -> None: general_env = get_general_environments() + # first resolve general environment because merge doesn't expect + # values to be list. 
+ # TODO: switch to OpenPype environment functions merged_env = acre.merge( - acre.parse(general_env), + acre.compute(acre.parse(general_env), cleanup=False), dict(os.environ) ) env = acre.compute( From 52314b0bf514f58c042c2a7c7bdd9d45a24ae2e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 19:03:08 +0200 Subject: [PATCH 592/785] update ftrack api to 2.3.3 --- openpype/modules/ftrack/ftrack_server/lib.py | 21 +++++++++++++++++--- poetry.lock | 20 +++++++++---------- pyproject.toml | 2 +- 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 3da1e7c7f0..947dacf917 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -7,6 +7,7 @@ import threading import datetime import time import queue +import collections import appdirs import pymongo @@ -309,7 +310,20 @@ class CustomEventHubSession(ftrack_api.session.Session): # Currently pending operations. self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True + + # OpenPype change - In new API are operations properties + new_api = hasattr(self.__class__, "record_operations") + + if new_api: + self._record_operations = collections.defaultdict( + lambda: True + ) + self._auto_populate = collections.defaultdict( + lambda: auto_populate + ) + else: + self.record_operations = True + self.auto_populate = auto_populate self.cache_key_maker = cache_key_maker if self.cache_key_maker is None: @@ -328,6 +342,9 @@ class CustomEventHubSession(ftrack_api.session.Session): if cache is not None: self.cache.caches.append(cache) + if new_api: + self.merge_lock = threading.RLock() + self._managed_request = None self._request = requests.Session() self._request.auth = ftrack_api.session.SessionAuthentication( @@ -335,8 +352,6 @@ class CustomEventHubSession(ftrack_api.session.Session): ) self.request_timeout = timeout - self.auto_populate = auto_populate - # Fetch server information and in doing so also check credentials. self._server_information = self._fetch_server_information() diff --git a/poetry.lock b/poetry.lock index 0033bc0d73..33deab003e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -221,7 +221,7 @@ python-versions = "~=3.7" [[package]] name = "certifi" -version = "2022.5.18.1" +version = "2022.6.15" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -456,19 +456,20 @@ python-versions = ">=3.7" [[package]] name = "ftrack-python-api" -version = "2.0.0" +version = "2.3.3" description = "Python API for ftrack." 
category = "main" optional = false -python-versions = ">=2.7.9, <4.0" +python-versions = ">=2.7.9, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, < 3.10" [package.dependencies] +appdirs = ">=1,<2" arrow = ">=0.4.4,<1" -clique = ">=1.2.0,<2" +clique = "1.6.1" future = ">=0.16.0,<1" pyparsing = ">=2.0,<3" requests = ">=2,<3" -six = ">=1,<2" +six = ">=1.13.0,<2" termcolor = ">=1.1.0,<2" websocket-client = ">=0.40.0,<1" @@ -1885,8 +1886,8 @@ cachetools = [ {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, ] certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, @@ -2152,10 +2153,7 @@ frozenlist = [ {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, ] -ftrack-python-api = [ - {file = "ftrack-python-api-2.0.0.tar.gz", hash = "sha256:dd6f02c31daf5a10078196dc9eac4671e4297c762fbbf4df98de668ac12281d9"}, - {file = "ftrack_python_api-2.0.0-py2.py3-none-any.whl", hash = "sha256:d0df0f2df4b53947272f95e179ec98b477ee425bf4217b37bb59030ad989771e"}, -] +ftrack-python-api = [] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] diff --git a/pyproject.toml b/pyproject.toml index 1627b5e1c1..5785c7635b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ coolname = "*" clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" -ftrack-python-api = "2.0.*" +ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) From 3e7a9d3e468ebb7b9149fb3b5d7c1fed200732b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 19:04:22 +0200 Subject: [PATCH 593/785] use master branch of appdirs --- poetry.lock | 14 +++++++++----- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0033bc0d73..72e5763c9c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -92,7 +92,14 @@ version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/ActiveState/appdirs.git" +reference = "master" +resolved_reference = "193a2cbba58cce2542882fcedd0e49f6763672ed" [[package]] name = "arrow" @@ -1827,10 +1834,7 @@ ansicon = [ {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, ] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] +appdirs = [] arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, diff --git a/pyproject.toml b/pyproject.toml index 1627b5e1c1..4361c8c9f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } opentimelineio = { version = "0.14.0.dev1", source = "openpype" } -appdirs = "^1.4.3" +appdirs = { git = "https://github.com/ActiveState/appdirs.git", branch = "master" } blessed = "^1.17" # openpype terminal formatting coolname = "*" clique = "1.6.*" From a1122496c1c57e62a6a1118cee0fbcc20d4eec1e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Jul 2022 10:46:25 +0200 Subject: [PATCH 594/785] add missing project tasks into fields --- openpype/pipeline/template_data.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py index de46650f9d..824a25127c 100644 --- a/openpype/pipeline/template_data.py +++ b/openpype/pipeline/template_data.py @@ -213,7 +213,9 @@ def get_template_data_with_names( Dict[str, Any]: Data prepared for filling workdir template. """ - project_doc = get_project(project_name, fields=["name", "data.code"]) + project_doc = get_project( + project_name, fields=["name", "data.code", "config.tasks"] + ) asset_doc = None if asset_name: asset_doc = get_asset_by_name( From 7adb8453861ce29f095082494ced13b755921fc5 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 12:24:46 +0300 Subject: [PATCH 595/785] Add OCIO submodule. 
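For orientation only: once this submodule is checked out, the vendored config is expected to be resolved roughly as in the sketch below, mirroring the get_ocio_config_path helper added and then fixed in the follow-up patches. OPENPYPE_ROOT and the "nuke-default" profile folder are assumptions taken from those patches, not part of this commit.

import os

# Assumed location of the vendored OCIO config after the submodule checkout.
config_path = os.path.join(
    os.environ["OPENPYPE_ROOT"],            # root of the OpenPype checkout
    "vendor", "configs", "OpenColorIO-Configs",
    "nuke-default",                         # example profile folder
    "config.ocio",
)
# The look extractor later hands this to maketx as: --colorconfig <config_path>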
--- .gitmodules | 3 +++ vendor/configs/OpenColorIO-Configs | 1 + 2 files changed, 4 insertions(+) create mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/.gitmodules b/.gitmodules index dfd89cdb3c..bac3132b77 100644 --- a/.gitmodules +++ b/.gitmodules @@ -5,3 +5,6 @@ [submodule "tools/modules/powershell/PSWriteColor"] path = tools/modules/powershell/PSWriteColor url = https://github.com/EvotecIT/PSWriteColor.git +[submodule "vendor/configs/OpenColorIO-Configs"] + path = vendor/configs/OpenColorIO-Configs + url = https://github.com/imageworks/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 86070835b9883b46baa27e12bb079b9866b18356 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 12:33:29 +0300 Subject: [PATCH 596/785] Add OCIO path function. --- .../maya/plugins/publish/extract_look.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index d35b529c76..ce699d3d9a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -534,3 +534,25 @@ class ExtractModelRenderSets(ExtractLook): self.scene_type = self.scene_type_prefix + self.scene_type return typ + + +def get_ocio_config_path(profile_folder): + """Path to OpenPype vendorized OCIO. + + Vendorized OCIO config file path is grabbed from the specific path + hierarchy specified below. + + "{OPENPYPE_ROOT}/vendor/OpenColorIO-Configs/{profile_folder}/config.ocio" + Args: + profile_folder (str): Name of folder to grab config file from. + + Returns: + str: Path to vendorized config file. + """ + return os.path.join( + os.environ["OPENPYPE_ROOT"], + "vendor", + "OpenColorIO-Configs", + profile_folder, + "config.ocio" + ) From 03767d28912b65a47b66826cc359a6db0baf4533 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 13:03:37 +0300 Subject: [PATCH 597/785] move function --- .../maya/plugins/publish/extract_look.py | 45 +++++++++---------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ce699d3d9a..42d4835fdf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -27,6 +27,28 @@ def escape_space(path): return '"{}"'.format(path) if " " in path else path +def get_ocio_config_path(profile_folder): + """Path to OpenPype vendorized OCIO. + + Vendorized OCIO config file path is grabbed from the specific path + hierarchy specified below. + + "{OPENPYPE_ROOT}/vendor/OpenColorIO-Configs/{profile_folder}/config.ocio" + Args: + profile_folder (str): Name of folder to grab config file from. + + Returns: + str: Path to vendorized config file. + """ + return os.path.join( + os.environ["OPENPYPE_ROOT"], + "vendor", + "OpenColorIO-Configs", + profile_folder, + "config.ocio" + ) + + def find_paths_by_hash(texture_hash): """Find the texture hash key in the dictionary. 
@@ -492,7 +514,6 @@ class ExtractLook(openpype.api.Extractor): colorconvert = "--colorconvert sRGB linear" else: colorconvert = "" - # Ensure folder exists if not os.path.exists(os.path.dirname(converted)): os.makedirs(os.path.dirname(converted)) @@ -534,25 +555,3 @@ class ExtractModelRenderSets(ExtractLook): self.scene_type = self.scene_type_prefix + self.scene_type return typ - - -def get_ocio_config_path(profile_folder): - """Path to OpenPype vendorized OCIO. - - Vendorized OCIO config file path is grabbed from the specific path - hierarchy specified below. - - "{OPENPYPE_ROOT}/vendor/OpenColorIO-Configs/{profile_folder}/config.ocio" - Args: - profile_folder (str): Name of folder to grab config file from. - - Returns: - str: Path to vendorized config file. - """ - return os.path.join( - os.environ["OPENPYPE_ROOT"], - "vendor", - "OpenColorIO-Configs", - profile_folder, - "config.ocio" - ) From cd7ef426d891381de1c8d4e028c967793784d130 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 13:31:39 +0300 Subject: [PATCH 598/785] Add configuration variable to `maketx` --- openpype/hosts/maya/plugins/publish/extract_look.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 42d4835fdf..faea0247da 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -514,6 +514,9 @@ class ExtractLook(openpype.api.Extractor): colorconvert = "--colorconvert sRGB linear" else: colorconvert = "" + + config_path = get_ocio_config_path("nuke-default") + color_config = "--colorconfig {0}".format(config_path) # Ensure folder exists if not os.path.exists(os.path.dirname(converted)): os.makedirs(os.path.dirname(converted)) @@ -523,10 +526,11 @@ class ExtractLook(openpype.api.Extractor): filepath, converted, # Include `source-hash` as string metadata - "-sattrib", + "--sattrib", "sourceHash", escape_space(texture_hash), colorconvert, + color_config ) return converted, COPY, texture_hash From 81f3bd379b34acb9727a9ab6ad621a87e9bcb9b1 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 13:31:58 +0300 Subject: [PATCH 599/785] Fix function path bug --- openpype/hosts/maya/plugins/publish/extract_look.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index faea0247da..f71a01e474 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -43,6 +43,7 @@ def get_ocio_config_path(profile_folder): return os.path.join( os.environ["OPENPYPE_ROOT"], "vendor", + "config", "OpenColorIO-Configs", profile_folder, "config.ocio" From 8c95aab796ec3cc284851b2d1c3170ead24a22b7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 28 Jul 2022 12:41:49 +0200 Subject: [PATCH 600/785] OP-3283 - extracted logic to plugin to reuse --- openpype/hosts/photoshop/api/plugin.py | 53 +++++++++++++++++++ .../plugins/create/create_legacy_image.py | 50 +++-------------- 2 files changed, 61 insertions(+), 42 deletions(-) diff --git a/openpype/hosts/photoshop/api/plugin.py b/openpype/hosts/photoshop/api/plugin.py index c80e6bbd06..ecbfbf91e3 100644 --- a/openpype/hosts/photoshop/api/plugin.py +++ b/openpype/hosts/photoshop/api/plugin.py @@ -2,6 +2,11 @@ import re from openpype.pipeline import LoaderPlugin from .launch_logic import stub +from openpype.pipeline 
import legacy_io +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import prepare_template_data +from openpype.lib.profiles_filtering import filter_profiles def get_unique_layer_name(layers, asset_name, subset_name): @@ -33,3 +38,51 @@ class PhotoshopLoader(LoaderPlugin): @staticmethod def get_stub(): return stub() + + +def get_subset_template(family): + """Get subset template name from Settings""" + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.tasks"] + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + tools_settings = get_project_settings(project_name)["global"]["tools"] + profiles = tools_settings["creator"]["subset_name_profiles"] + filtering_criteria = { + "families": family, + "hosts": "photoshop", + "tasks": task_name, + "task_types": task_type + } + + matching_profile = filter_profiles(profiles, filtering_criteria) + if matching_profile: + return matching_profile["template"] + + +def get_subset_name_for_multiple(subset_name, subset_template, group, + family, variant): + """Update subset name with layer information to differentiate multiple + + subset_template might contain specific way how to format layer name + ({layer},{Layer} or {LAYER}). If subset_template doesn't contain placeholder + at all, fall back to original solution. + """ + if not subset_template or 'layer' not in subset_template.lower(): + subset_name += group.name.title().replace(" ", "") + else: + fill_pairs = { + "family": family, + "variant": variant, + "task": legacy_io.Session["AVALON_TASK"], + "layer": group.name + } + + return subset_template.format(**prepare_template_data(fill_pairs)) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 142cddfd52..6d0587c20c 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,11 +1,8 @@ from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop -from openpype.pipeline import legacy_io -from openpype.client import get_asset_by_name -from openpype.settings import get_project_settings -from openpype.lib import prepare_template_data -from openpype.lib.profiles_filtering import filter_profiles + +from openpype.hosts.photoshop.api.plugin import get_subset_template, get_subset_name_for_multiple class CreateImage(create.LegacyCreator): @@ -87,18 +84,12 @@ class CreateImage(create.LegacyCreator): subset_name = creator_subset_name if len(groups) > 1: - subset_template = self._get_subset_template(self.family) - if not subset_template or 'layer' not in subset_template.lower(): - subset_name += group.name.title().replace(" ", "") - else: - fill_pairs = { - "variant": self.data["variant"], - "family": self.family, - "task": legacy_io.Session["AVALON_TASK"], - "layer": group.name - } - - subset_name = subset_template.format(**prepare_template_data(fill_pairs)) + subset_template = get_subset_template(self.family) + subset_name = get_subset_name_for_multiple(subset_name, + subset_template, + group, + self.family, + self.data["variant"]) if group.long_name: for directory in group.long_name[::-1]: @@ -120,28 
+111,3 @@ class CreateImage(create.LegacyCreator): cls, variant, task_name, asset_id, project_name, host_name ): return {"layer": ""} - - def _get_subset_template(self, family): - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - tools_settings = get_project_settings(project_name)["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] - filtering_criteria = { - "families": family, - "hosts": "photoshop", - "tasks": task_name, - "task_types": task_type - } - - matching_profile = filter_profiles(profiles, filtering_criteria) - if matching_profile: - return matching_profile["template"] From e287e1fd48af95c6bd5822e6d0f93d37b7896080 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 28 Jul 2022 13:44:19 +0300 Subject: [PATCH 601/785] Fix bugs --- openpype/hosts/maya/plugins/publish/extract_look.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index f71a01e474..0b26e922d5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -43,7 +43,7 @@ def get_ocio_config_path(profile_folder): return os.path.join( os.environ["OPENPYPE_ROOT"], "vendor", - "config", + "configs", "OpenColorIO-Configs", profile_folder, "config.ocio" @@ -102,10 +102,11 @@ def maketx(source, destination, *args): # use oiio-optimized settings for tile-size, planarconfig, metadata "--oiio", "--filter lanczos3", + escape_space(source) ] cmd.extend(args) - cmd.extend(["-o", escape_space(destination), escape_space(source)]) + cmd.extend(["-o", escape_space(destination)]) cmd = " ".join(cmd) From 87cf386a54917adacfd91542cd3613ac0fe4babc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 28 Jul 2022 12:52:22 +0200 Subject: [PATCH 602/785] OP-3283 - implemented for new creator --- .../photoshop/plugins/create/create_image.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index f15068b031..ebb268dc93 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -5,6 +5,10 @@ from openpype.pipeline import ( CreatedInstance, legacy_io ) +from openpype.hosts.photoshop.api.plugin import ( + get_subset_template, + get_subset_name_for_multiple +) class ImageCreator(Creator): @@ -68,7 +72,12 @@ class ImageCreator(Creator): if creating_multiple_groups: # concatenate with layer name to differentiate subsets - subset_name += group.name.title().replace(" ", "") + subset_template = get_subset_template(self.family) + subset_name = get_subset_name_for_multiple(subset_name, + subset_template, + group, + self.family, + data["variant"]) if group.long_name: for directory in group.long_name[::-1]: @@ -143,3 +152,9 @@ class ImageCreator(Creator): def _clean_highlights(self, stub, item): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') + @classmethod + def get_dynamic_data( + cls, variant, task_name, asset_id, project_name, host_name + ): + """Called by UI, 
empty value for layer must be provided.""" + return {"layer": ""} From a03f2b6a1a6ee24692e25710d55fd0af11eecb96 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 28 Jul 2022 12:53:20 +0200 Subject: [PATCH 603/785] OP-3283 - fixed imports --- .../hosts/photoshop/plugins/create/create_legacy_image.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 6d0587c20c..d1a54a407e 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -2,7 +2,10 @@ from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop -from openpype.hosts.photoshop.api.plugin import get_subset_template, get_subset_name_for_multiple +from openpype.hosts.photoshop.api.plugin import ( + get_subset_template, + get_subset_name_for_multiple +) class CreateImage(create.LegacyCreator): From bc2cec540c8b7962d3b0c0fc8dabe5f6cf54fb36 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 16:29:50 +0200 Subject: [PATCH 604/785] trayp: improving user feedback --- openpype/hosts/traypublisher/api/editorial.py | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index 92ad65a851..7c392ef508 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -55,7 +55,7 @@ class ShotMetadataSolver: return shot_rename_template.format(**data) except KeyError as _E: raise CreatorError(( - "Make sure all keys are correct in settings: \n\n" + "Make sure all keys in settings are correct:: \n\n" f"From template string {shot_rename_template} > " f"`{_E}` has no equivalent in \n" f"{list(data.keys())} input formating keys!" @@ -91,10 +91,13 @@ class ShotMetadataSolver: match = p.findall(search_text) if not match: raise CreatorError(( - "Make sure regex expression is correct: \n\n" - f"From settings '{token_key}' key " - f"with '{pattern}' expression, \n" - f"is not able to find anything in '{search_text}'!" + "Make sure regex expression works with your data: \n\n" + f"'{token_key}' with regex '{pattern}' in your settings\n" + "can't find any match in your clip name " + f"'{search_text}'!\n\nLook to: " + "'project_settings/traypublisher/editorial_creators" + "/editorial_simple/clip_name_tokenizer'\n" + "at your project settings..." )) # QUESTION:how to refactory `match[-1]` to some better way? 
@@ -129,7 +132,7 @@ class ShotMetadataSolver: } except KeyError as _E: raise CreatorError(( - "Make sure all keys are correct in settings: \n" + "Make sure all keys in settings are correct : \n" f"`{_E}` has no equivalent in \n{list(data.keys())}" )) @@ -146,9 +149,10 @@ class ShotMetadataSolver: **_parent_tokens_formating_data) except KeyError as _E: raise CreatorError(( - "Make sure all keys are correct in settings: \n\n" - f"From template string {shot_hierarchy['parents_path']} > " - f"`{_E}` has no equivalent in \n" + "Make sure all keys in settings are correct : \n\n" + f"`{_E}` from template string " + f"{shot_hierarchy['parents_path']}, " + f" has no equivalent in \n" f"{list(_parent_tokens_formating_data.keys())} parents" )) From 443c5a369619a907f83c9bdb43783ce64d9edc0e Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 28 Jul 2022 17:01:54 +0200 Subject: [PATCH 605/785] Fix: Shot&Sequence name with prefix over appends --- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 02c27382eb..040d6566f7 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -230,9 +230,9 @@ def update_op_assets( if item_type in ["Shot", "Sequence"]: # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue - item_name = "_".join(item_data["parents"] + [item_doc["name"]]) + item_name = f"{item_data['parents'][-1]}_{item['name']}" else: - item_name = item_doc["name"] + item_name = item["name"] # Set root folders parents item_data["parents"] = entity_parent_folders + item_data["parents"] From 037c5a13cddc3e9426ebfe2c46ec6abc82eb559f Mon Sep 17 00:00:00 2001 From: kaamaurice Date: Thu, 28 Jul 2022 18:46:47 +0200 Subject: [PATCH 606/785] bugfix blender ops for workfiles dialog --- openpype/hosts/blender/api/ops.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index c1b5add518..4f8410da74 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -220,12 +220,9 @@ class LaunchQtApp(bpy.types.Operator): self._app.store_window(self.bl_idname, window) self._window = window - if not isinstance( - self._window, - (QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType) - ): + if not isinstance(self._window, (QtWidgets.QWidget, ModuleType)): raise AttributeError( - "`window` should be a `QDialog or module`. Got: {}".format( + "`window` should be a `QWidget or module`. 
Got: {}".format( str(type(window)) ) ) @@ -249,9 +246,9 @@ class LaunchQtApp(bpy.types.Operator): self._window.setWindowFlags(on_top_flags) self._window.show() - if on_top_flags != origin_flags: - self._window.setWindowFlags(origin_flags) - self._window.show() + # if on_top_flags != origin_flags: + # self._window.setWindowFlags(origin_flags) + # self._window.show() return {'FINISHED'} From 44da89dc8669df3c5a26575c9cc80b1e7ca8f5e6 Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 28 Jul 2022 18:59:46 +0200 Subject: [PATCH 607/785] Fix: project with no dedicated task types doesn't take defaults --- openpype/modules/kitsu/utils/update_op_with_zou.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 040d6566f7..8f5566e8ec 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -276,7 +276,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_doc = create_project(project_name, project_name, dbcon=dbcon) # Project data and tasks - project_data = project["data"] or {} + project_data = project_doc["data"] or {} # Build project code and update Kitsu project_code = project.get("code") @@ -305,6 +305,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: "config.tasks": { t["name"]: {"short_name": t.get("short_name", t["name"])} for t in gazu.task.all_task_types_for_project(project) + or gazu.task.all_task_types() }, "data": project_data, } From ad4aeb0071e7ee92a592e7de53fb24a230a13bc8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 10:38:01 +0200 Subject: [PATCH 608/785] use query functions on remaining places --- openpype/hooks/pre_global_host_data.py | 8 +++----- .../hosts/fusion/scripts/fusion_switch_shot.py | 8 -------- openpype/hosts/testhost/api/pipeline.py | 8 ++++---- .../testhost/plugins/create/auto_creator.py | 13 ++++--------- openpype/pipeline/create/context.py | 14 +++++--------- openpype/pipeline/thumbnail.py | 10 +++------- openpype/scripts/fusion_switch_shot.py | 18 ++++++------------ 7 files changed, 25 insertions(+), 54 deletions(-) diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py index 6577e37cbe..8a178915fb 100644 --- a/openpype/hooks/pre_global_host_data.py +++ b/openpype/hooks/pre_global_host_data.py @@ -1,3 +1,4 @@ +from openpype.client import get_project, get_asset_by_name from openpype.lib import ( PreLaunchHook, EnvironmentPrepData, @@ -69,7 +70,7 @@ class GlobalHostDataHook(PreLaunchHook): self.data["dbcon"] = dbcon # Project document - project_doc = dbcon.find_one({"type": "project"}) + project_doc = get_project(project_name) self.data["project_doc"] = project_doc asset_name = self.data.get("asset_name") @@ -79,8 +80,5 @@ class GlobalHostDataHook(PreLaunchHook): ) return - asset_doc = dbcon.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) self.data["asset_doc"] = asset_doc diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 52a157c56e..87ff8e2ffe 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -3,9 +3,7 @@ import re import sys import logging -# Pipeline imports from openpype.client import ( - get_project, get_asset_by_name, get_versions, ) @@ 
-21,9 +19,6 @@ from openpype.lib.avalon_context import get_workdir_from_session log = logging.getLogger("Update Slap Comp") -self = sys.modules[__name__] -self._project = None - def _format_version_folder(folder): """Format a version folder based on the filepath @@ -212,9 +207,6 @@ def switch(asset_name, filepath=None, new=True): asset = get_asset_by_name(project_name, asset_name) assert asset, "Could not find '%s' in the database" % asset_name - # Get current project - self._project = get_project(project_name) - # Go to comp if not filepath: current_comp = api.get_current_comp() diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py index 285fe8f8d6..1e05f336fb 100644 --- a/openpype/hosts/testhost/api/pipeline.py +++ b/openpype/hosts/testhost/api/pipeline.py @@ -1,6 +1,6 @@ import os import json -from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name class HostContext: @@ -17,10 +17,10 @@ class HostContext: if not asset_name: return project_name - asset_doc = legacy_io.find_one( - {"type": "asset", "name": asset_name}, - {"data.parents": 1} + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.parents"] ) + parents = asset_doc.get("data", {}).get("parents") or [] hierarchy = [project_name] diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py index 06b95375b1..8d59fc3242 100644 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ b/openpype/hosts/testhost/plugins/create/auto_creator.py @@ -1,10 +1,11 @@ from openpype.lib import NumberDef -from openpype.hosts.testhost.api import pipeline +from openpype.client import get_asset_by_name from openpype.pipeline import ( legacy_io, AutoCreator, CreatedInstance, ) +from openpype.hosts.testhost.api import pipeline class MyAutoCreator(AutoCreator): @@ -44,10 +45,7 @@ class MyAutoCreator(AutoCreator): host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -69,10 +67,7 @@ class MyAutoCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 9b55c3b21e..eaaed39357 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,6 +6,7 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.client import get_assets from openpype.host import INewPublisher from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( @@ -1082,15 +1083,10 @@ class CreateContext: for asset_name in task_names_by_asset_name.keys() if asset_name is not None ] - asset_docs = list(self.dbcon.find( - { - "type": "asset", - "name": {"$in": asset_names} - }, - { - "name": True, - "data.tasks": True - } + asset_docs = list(get_assets( + self.project_name, + asset_names=asset_names, + fields=["name", "data.tasks"] )) task_names_by_asset_name = {} diff --git a/openpype/pipeline/thumbnail.py 
b/openpype/pipeline/thumbnail.py index ec97b36954..eb383b16d9 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -2,6 +2,7 @@ import os import copy import logging +from openpype.client import get_project from . import legacy_io from .plugin_discover import ( discover, @@ -85,13 +86,8 @@ class TemplateResolver(ThumbnailResolver): self.log.debug("Thumbnail entity does not have set template") return - project = self.dbcon.find_one( - {"type": "project"}, - { - "name": True, - "data.code": True - } - ) + project_name = self.dbcon.active_project() + project = get_project(project_name, fields=["name", "data.code"]) template_data = copy.deepcopy( thumbnail_entity["data"].get("template_data") or {} diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 245fc665f0..b5d3290e3a 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -3,6 +3,8 @@ import re import sys import logging +from openpype.client import get_project, get_asset_by_name, get_versions + # Pipeline imports from openpype.hosts.fusion import api import openpype.hosts.fusion.api.lib as fusion_lib @@ -19,9 +21,6 @@ from openpype.lib.avalon_context import get_workdir_from_session log = logging.getLogger("Update Slap Comp") -self = sys.modules[__name__] -self._project = None - def _format_version_folder(folder): """Format a version folder based on the filepath @@ -131,8 +130,8 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) - versions = list(versions) + project_name = legacy_io.active_project() + versions = list(get_versions(project_name, version_ids=version_ids)) start = min(v["data"]["frameStart"] for v in versions) end = max(v["data"]["frameEnd"] for v in versions) @@ -162,15 +161,10 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = legacy_io.find_one({"type": "asset", "name": asset_name}) + project_name = legacy_io.active_project() + asset = get_asset_by_name(project_name, asset_name) assert asset, "Could not find '%s' in the database" % asset_name - # Get current project - self._project = legacy_io.find_one({ - "type": "project", - "name": legacy_io.Session["AVALON_PROJECT"] - }) - # Go to comp if not filepath: current_comp = api.get_current_comp() From 0f97387032f5698c2142752a2945383aaf18036b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 11:17:00 +0200 Subject: [PATCH 609/785] remove unused import --- openpype/scripts/fusion_switch_shot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index b5d3290e3a..15f189e7cb 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -3,7 +3,7 @@ import re import sys import logging -from openpype.client import get_project, get_asset_by_name, get_versions +from openpype.client import get_asset_by_name, get_versions # Pipeline imports from openpype.hosts.fusion import api From f08008d61ec577f46a61469dc4bfa8a495d3dfbc Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 12:22:20 +0300 Subject: [PATCH 610/785] Revert "Add OCIO submodule." This reverts commit 7adb8453861ce29f095082494ced13b755921fc5. 
--- .gitmodules | 3 --- vendor/configs/OpenColorIO-Configs | 1 - 2 files changed, 4 deletions(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/.gitmodules b/.gitmodules index bac3132b77..dfd89cdb3c 100644 --- a/.gitmodules +++ b/.gitmodules @@ -5,6 +5,3 @@ [submodule "tools/modules/powershell/PSWriteColor"] path = tools/modules/powershell/PSWriteColor url = https://github.com/EvotecIT/PSWriteColor.git -[submodule "vendor/configs/OpenColorIO-Configs"] - path = vendor/configs/OpenColorIO-Configs - url = https://github.com/imageworks/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 1267e9ea921381ca0b5d8907c0a9271352f0c078 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 12:48:01 +0300 Subject: [PATCH 611/785] Revert "Revert "Add OCIO submodule."" This reverts commit f08008d61ec577f46a61469dc4bfa8a495d3dfbc. --- .gitmodules | 3 +++ vendor/configs/OpenColorIO-Configs | 1 + 2 files changed, 4 insertions(+) create mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/.gitmodules b/.gitmodules index dfd89cdb3c..bac3132b77 100644 --- a/.gitmodules +++ b/.gitmodules @@ -5,3 +5,6 @@ [submodule "tools/modules/powershell/PSWriteColor"] path = tools/modules/powershell/PSWriteColor url = https://github.com/EvotecIT/PSWriteColor.git +[submodule "vendor/configs/OpenColorIO-Configs"] + path = vendor/configs/OpenColorIO-Configs + url = https://github.com/imageworks/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From de8668dc351ff77794da22427145fdbc6fc4b679 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 12:13:43 +0200 Subject: [PATCH 612/785] OP-3283 - fix not create separate from multiple selected --- .../photoshop/plugins/create/create_image.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index ebb268dc93..5688fe376e 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -42,17 +42,17 @@ class ImageCreator(Creator): top_level_selected_items = stub.get_selected_layers() if pre_create_data.get("use_selection"): only_single_item_selected = len(top_level_selected_items) == 1 - for selected_item in top_level_selected_items: - if ( - only_single_item_selected or - pre_create_data.get("create_multiple")): + if ( + only_single_item_selected or + pre_create_data.get("create_multiple")): + for selected_item in top_level_selected_items: if selected_item.group: groups_to_create.append(selected_item) else: top_layers_to_wrap.append(selected_item) - else: - group = stub.group_selected_layers(subset_name_from_ui) - groups_to_create.append(group) + else: + group = stub.group_selected_layers(subset_name_from_ui) + groups_to_create.append(group) if not groups_to_create and not top_layers_to_wrap: group = stub.create_group(subset_name_from_ui) @@ -156,5 +156,4 @@ class ImageCreator(Creator): def get_dynamic_data( cls, variant, task_name, asset_id, project_name, 
host_name ): - """Called by UI, empty value for layer must be provided.""" - return {"layer": ""} + return {"layer": ""} \ No newline at end of file From 831884232cf1c51a76c25470986ac3b01bc44841 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 13:03:00 +0200 Subject: [PATCH 613/785] OP-3283 - fix without select and multiple If creator was configured to not use selection and not create multiple, it failed before. (It should create one wrapping group, eg. instance, around all. Locked background layer cannot be present!) --- openpype/hosts/photoshop/plugins/create/create_image.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 5688fe376e..2b6e5e6448 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -53,6 +53,13 @@ class ImageCreator(Creator): else: group = stub.group_selected_layers(subset_name_from_ui) groups_to_create.append(group) + else: + stub.select_layers(stub.get_layers()) + try: + group = stub.group_selected_layers(subset_name_from_ui) + except: + raise ValueError("Cannot group locked Bakcground layer!") + groups_to_create.append(group) if not groups_to_create and not top_layers_to_wrap: group = stub.create_group(subset_name_from_ui) From 90962d673511c60df44e34e746753208fc359a1c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 14:41:28 +0200 Subject: [PATCH 614/785] OP-3283 - refactored logic Easier solution found without reinventing logic. --- openpype/hosts/photoshop/api/plugin.py | 53 ------------------- .../photoshop/plugins/create/create_image.py | 25 ++++----- .../plugins/create/create_legacy_image.py | 24 ++++----- 3 files changed, 20 insertions(+), 82 deletions(-) diff --git a/openpype/hosts/photoshop/api/plugin.py b/openpype/hosts/photoshop/api/plugin.py index ecbfbf91e3..c80e6bbd06 100644 --- a/openpype/hosts/photoshop/api/plugin.py +++ b/openpype/hosts/photoshop/api/plugin.py @@ -2,11 +2,6 @@ import re from openpype.pipeline import LoaderPlugin from .launch_logic import stub -from openpype.pipeline import legacy_io -from openpype.client import get_asset_by_name -from openpype.settings import get_project_settings -from openpype.lib import prepare_template_data -from openpype.lib.profiles_filtering import filter_profiles def get_unique_layer_name(layers, asset_name, subset_name): @@ -38,51 +33,3 @@ class PhotoshopLoader(LoaderPlugin): @staticmethod def get_stub(): return stub() - - -def get_subset_template(family): - """Get subset template name from Settings""" - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - tools_settings = get_project_settings(project_name)["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] - filtering_criteria = { - "families": family, - "hosts": "photoshop", - "tasks": task_name, - "task_types": task_type - } - - matching_profile = filter_profiles(profiles, filtering_criteria) - if matching_profile: - return matching_profile["template"] - - -def get_subset_name_for_multiple(subset_name, subset_template, group, - family, variant): - """Update subset 
name with layer information to differentiate multiple - - subset_template might contain specific way how to format layer name - ({layer},{Layer} or {LAYER}). If subset_template doesn't contain placeholder - at all, fall back to original solution. - """ - if not subset_template or 'layer' not in subset_template.lower(): - subset_name += group.name.title().replace(" ", "") - else: - fill_pairs = { - "family": family, - "variant": variant, - "task": legacy_io.Session["AVALON_TASK"], - "layer": group.name - } - - return subset_template.format(**prepare_template_data(fill_pairs)) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 2b6e5e6448..44a74de650 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -5,10 +5,7 @@ from openpype.pipeline import ( CreatedInstance, legacy_io ) -from openpype.hosts.photoshop.api.plugin import ( - get_subset_template, - get_subset_name_for_multiple -) +from openpype.lib import prepare_template_data class ImageCreator(Creator): @@ -71,6 +68,7 @@ class ImageCreator(Creator): group = stub.group_selected_layers(layer.name) groups_to_create.append(group) + layer_name = '' creating_multiple_groups = len(groups_to_create) > 1 for group in groups_to_create: subset_name = subset_name_from_ui # reset to name from creator UI @@ -78,13 +76,12 @@ class ImageCreator(Creator): created_group_name = self._clean_highlights(stub, group.name) if creating_multiple_groups: - # concatenate with layer name to differentiate subsets - subset_template = get_subset_template(self.family) - subset_name = get_subset_name_for_multiple(subset_name, - subset_template, - group, - self.family, - data["variant"]) + layer_name = group.name + if "{layer}" not in subset_name.lower(): + subset_name += "{Layer}" + + layer_fill = prepare_template_data({"layer": layer_name}) + subset_name = subset_name.format(**layer_fill) if group.long_name: for directory in group.long_name[::-1]: @@ -160,7 +157,5 @@ class ImageCreator(Creator): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') @classmethod - def get_dynamic_data( - cls, variant, task_name, asset_id, project_name, host_name - ): - return {"layer": ""} \ No newline at end of file + def get_dynamic_data(cls, *args, **kwargs): + return {"layer": "{layer}"} diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index d1a54a407e..e465c30abd 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -2,10 +2,7 @@ from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop -from openpype.hosts.photoshop.api.plugin import ( - get_subset_template, - get_subset_name_for_multiple -) +from openpype.lib import prepare_template_data class CreateImage(create.LegacyCreator): @@ -80,6 +77,7 @@ class CreateImage(create.LegacyCreator): groups.append(group) creator_subset_name = self.data["subset"] + layer_name = '' for group in groups: long_names = [] group.name = group.name.replace(stub.PUBLISH_ICON, ''). 
\ @@ -87,12 +85,12 @@ class CreateImage(create.LegacyCreator): subset_name = creator_subset_name if len(groups) > 1: - subset_template = get_subset_template(self.family) - subset_name = get_subset_name_for_multiple(subset_name, - subset_template, - group, - self.family, - self.data["variant"]) + layer_name = group.name + if "{layer}" not in subset_name.lower(): + subset_name += "{Layer}" + + layer_fill = prepare_template_data({"layer": layer_name}) + subset_name = subset_name.format(**layer_fill) if group.long_name: for directory in group.long_name[::-1]: @@ -110,7 +108,5 @@ class CreateImage(create.LegacyCreator): stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) @classmethod - def get_dynamic_data( - cls, variant, task_name, asset_id, project_name, host_name - ): - return {"layer": ""} + def get_dynamic_data(cls, *args, **kwargs): + return {"layer": "{layer}"} From 4ac9ed6886ee455640e93c31fac510270ce571bd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 16:24:16 +0200 Subject: [PATCH 615/785] OP-3283 - fix invalid characters in subset name Removal of invalid characters must be done in Create phase to persist. --- openpype/hosts/photoshop/plugins/create/create_image.py | 9 ++++++++- .../photoshop/plugins/create/create_legacy_image.py | 9 ++++++++- .../hosts/photoshop/plugins/publish/validate_naming.py | 8 ++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 44a74de650..2cfbfa8778 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,3 +1,5 @@ +import re + from openpype.hosts.photoshop import api from openpype.lib import BoolDef from openpype.pipeline import ( @@ -6,6 +8,7 @@ from openpype.pipeline import ( legacy_io ) from openpype.lib import prepare_template_data +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class ImageCreator(Creator): @@ -76,7 +79,11 @@ class ImageCreator(Creator): created_group_name = self._clean_highlights(stub, group.name) if creating_multiple_groups: - layer_name = group.name + layer_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + group.name + ) if "{layer}" not in subset_name.lower(): subset_name += "{Layer}" diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index e465c30abd..2792a775e0 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,8 +1,11 @@ +import re + from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop from openpype.lib import prepare_template_data +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class CreateImage(create.LegacyCreator): @@ -85,7 +88,11 @@ class CreateImage(create.LegacyCreator): subset_name = creator_subset_name if len(groups) > 1: - layer_name = group.name + layer_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + group.name + ) if "{layer}" not in subset_name.lower(): subset_name += "{Layer}" diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index b53f4e8198..8106d6ff16 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ 
b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api from openpype.pipeline import PublishXmlValidationError from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class ValidateNamingRepair(pyblish.api.Action): @@ -50,6 +51,13 @@ class ValidateNamingRepair(pyblish.api.Action): subset_name = re.sub(invalid_chars, replace_char, instance.data["subset"]) + # format from Tool Creator + subset_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + subset_name + ) + layer_meta["subset"] = subset_name stub.imprint(instance_id, layer_meta) From d63d0cfb6f40ba7a0cc5b6a0cb3cc8d3057da6ba Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 17:56:39 +0300 Subject: [PATCH 616/785] Remove incorrect code. This reverts commit a26fd8394c71f0f01552f20987ac6618747d1572. --- openpype/hosts/maya/api/menu.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index ed546ba7a8..c3ce8b0227 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,7 +6,7 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile, get_current_project_settings +from openpype.api import BuildWorkfile from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools @@ -99,17 +99,11 @@ def install(): cmds.menuItem(divider=True) - render_settings_flag = get_current_project_settings()["maya"]["RenderSettings"]["apply_render_settings"] # noqa - if render_settings_flag: - cmds.menuItem( - "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings(), # noqa - enable=True) - else: - cmds.menuItem( - "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings(), # noqa - enable=False) + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa + ) + cmds.menuItem(divider=True) cmds.menuItem( From 5c8eac6b6357fa80859ffbed45be41cf8ae106da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:07:57 +0200 Subject: [PATCH 617/785] OP-3405 - replaced find with get_representations --- .../modules/sync_server/sync_server_module.py | 32 +++++++------------ 1 file changed, 12 insertions(+), 20 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 4027561d22..81aff9368f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -25,6 +25,8 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError +from openpype.client import get_representations + log = PypeLogger.get_logger("SyncServer") @@ -344,6 +346,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "files.sites.name": site_name } + # TODO currently not possible to replace with get_representations representations = list( self.connection.database[collection].find(query)) if not representations: @@ -391,12 +394,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ self.log.debug("Validation of {} for {} started".format(collection, site_name)) - query = { - "type": "representation" - } - - 
representations = list( - self.connection.database[collection].find(query)) + representations = list(get_representations(collection)) if not representations: self.log.debug("No repre found") return @@ -1593,14 +1591,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - query = { - "_id": ObjectId(representation_id) - } - - representation = self.connection.database[collection].find_one(query) - if not representation: + representations = get_representations(collection, [representation_id]) + if not representations: raise ValueError("Representation {} not found in {}". format(representation_id, collection)) + representation = representations[0] if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") @@ -1808,18 +1803,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - query = { - "_id": ObjectId(representation_id) - } - - representation = list( - self.connection.database[collection].find(query)) - if not representation: + representations = list(get_representations(collection, + [representation_id], + fields=["files"])) + if not representations: self.log.debug("No repre {} found".format( representation_id)) return - representation = representation.pop() + representation = representations.pop() local_file_path = '' for file in representation.get("files"): local_file_path = self.get_local_file_path(collection, From c944ae35c9848045cfb73ccfc1b93f30f7af2989 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:17:03 +0200 Subject: [PATCH 618/785] OP-3405 - replaced find with get_representation_by_id --- openpype/modules/sync_server/tray/models.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 6d1e85c17a..a97797c920 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -11,6 +11,7 @@ from openpype.tools.utils.delegates import pretty_timestamp from openpype.lib import PypeLogger from openpype.api import get_local_site_id +from openpype.client import get_representation_by_id from . 
import lib @@ -919,8 +920,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): repre_id = self.data(index, Qt.UserRole) - representation = list(self.dbcon.find({"type": "representation", - "_id": repre_id})) + representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, representation.pop(), @@ -1357,11 +1357,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel): file_id = self.data(index, Qt.UserRole) updated_file = None - # conversion from cursor to list - representations = list(self.dbcon.find({"type": "representation", - "_id": self._id})) + representation = get_representation_by_id(self.project, self._id) + if not representation: + return - representation = representations.pop() for repre_file in representation["files"]: if repre_file["_id"] == file_id: updated_file = repre_file From 292d071f442a494cabd2161512012b13e391a9f8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:39:59 +0200 Subject: [PATCH 619/785] OP-3405 - query is required for updates --- openpype/modules/sync_server/sync_server_module.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 81aff9368f..6a3dbf6095 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1611,6 +1611,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} + query = { + "_id": ObjectId(representation_id) + } + if file_id: # reset site for particular file self._reset_site_for_file(collection, query, elem, file_id, site_name) From 0f5ec0f0c4cbd4db8c4968db75f6375b6bdf7f59 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:54:51 +0200 Subject: [PATCH 620/785] OP-3405 - used get_representation_by_id --- .../modules/sync_server/sync_server_module.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6a3dbf6095..71e35c7839 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -25,7 +25,7 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError -from openpype.client import get_representations +from openpype.client import get_representations, get_representation_by_id log = PypeLogger.get_logger("SyncServer") @@ -1591,11 +1591,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - representations = get_representations(collection, [representation_id]) - if not representations: + representation = get_representation_by_id(collection, + representation_id) + if not representation: raise ValueError("Representation {} not found in {}". 
format(representation_id, collection)) - representation = representations[0] + if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") @@ -1807,15 +1808,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - representations = list(get_representations(collection, - [representation_id], - fields=["files"])) - if not representations: + representation = get_representation_by_id(collection, + representation_id, + fields=["files"]) + if not representation: self.log.debug("No repre {} found".format( representation_id)) return - representation = representations.pop() local_file_path = '' for file in representation.get("files"): local_file_path = self.get_local_file_path(collection, From ccdff822a54c6bf146ad1a8a9b2206c319967719 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 18:11:11 +0200 Subject: [PATCH 621/785] moved 'get_project_database' and 'get_project_connection' to mongo --- openpype/client/entities.py | 30 ++---------------------------- openpype/client/mongo.py | 25 +++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index dd5d831ecf..0e94b99ae6 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,38 +6,12 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. """ -import os import collections import six from bson.objectid import ObjectId -from .mongo import OpenPypeMongoConnection - - -def _get_project_database(): - db_name = os.environ.get("AVALON_DB") or "avalon" - return OpenPypeMongoConnection.get_mongo_client()[db_name] - - -def get_project_connection(project_name): - """Direct access to mongo collection. - - We're trying to avoid using direct access to mongo. This should be used - only for Create, Update and Remove operations until there are implemented - api calls for that. - - Args: - project_name(str): Project name for which collection should be - returned. - - Returns: - pymongo.Collection: Collection realated to passed project. - """ - - if not project_name: - raise ValueError("Invalid project name {}".format(str(project_name))) - return _get_project_database()[project_name] +from .mongo import get_project_database, get_project_connection def _prepare_fields(fields, required_fields=None): @@ -72,7 +46,7 @@ def _convert_ids(in_ids): def get_projects(active=True, inactive=False, fields=None): - mongodb = _get_project_database() + mongodb = get_project_database() for project_name in mongodb.collection_names(): if project_name in ("system.indexes",): continue diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py index a747250107..72acbc5476 100644 --- a/openpype/client/mongo.py +++ b/openpype/client/mongo.py @@ -208,3 +208,28 @@ class OpenPypeMongoConnection: mongo_url, time.time() - t1 )) return mongo_client + + +def get_project_database(): + db_name = os.environ.get("AVALON_DB") or "avalon" + return OpenPypeMongoConnection.get_mongo_client()[db_name] + + +def get_project_connection(project_name): + """Direct access to mongo collection. + + We're trying to avoid using direct access to mongo. This should be used + only for Create, Update and Remove operations until there are implemented + api calls for that. 
+ + Args: + project_name(str): Project name for which collection should be + returned. + + Returns: + pymongo.Collection: Collection realated to passed project. + """ + + if not project_name: + raise ValueError("Invalid project name {}".format(str(project_name))) + return get_project_database()[project_name] From c429a41188c614570e9d1d39cd6605897fbfaf38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 18:12:43 +0200 Subject: [PATCH 622/785] added initial variant of operations --- openpype/client/operations.py | 249 ++++++++++++++++++++++++++++++++++ 1 file changed, 249 insertions(+) create mode 100644 openpype/client/operations.py diff --git a/openpype/client/operations.py b/openpype/client/operations.py new file mode 100644 index 0000000000..365833b318 --- /dev/null +++ b/openpype/client/operations.py @@ -0,0 +1,249 @@ +import uuid +import copy +from abc import ABCMeta, abstractmethod + +import six +from bson.objectid import ObjectId +from pymongo import DeleteOne, InsertOne, UpdateOne + +from .mongo import get_project_connection + +REMOVED_VALUE = object() + + +@six.add_metaclass(ABCMeta) +class AbstractOperation(object): + """Base operation class.""" + + def __init__(self, entity_type): + self._entity_type = entity_type + self._id = uuid.uuid4() + + @property + def id(self): + return self._id + + @property + def entity_type(self): + return self._entity_type + + @abstractmethod + def to_mongo_operation(self): + pass + + +class CreateOperation(AbstractOperation): + def __init__(self, project_name, entity_type, data): + super(CreateOperation, self).__init__(entity_type) + + if not data: + data = {} + else: + data = copy.deepcopy(dict(data)) + + if "_id" not in data: + data["_id"] = ObjectId() + else: + data["_id"] = ObjectId(data["_id"]) + + self._entity_id = data["_id"] + self._data = data + + def __setitem__(self, key, value): + self.set_value(key, value) + + def __getitem__(self, key): + return self.data[key] + + def set_value(self, key, value): + self.data[key] = value + + def get(self, key, *args, **kwargs): + return self.data.get(key, *args, **kwargs) + + @property + def entity_id(self): + return self._entity_id + + @property + def data(self): + return self._data + + def to_mongo_operation(self): + return InsertOne(copy.deepcopy(self._data)) + + def to_data(self): + return { + "operation": "create", + "entity_type": self.entity_type, + "data": copy.deepcopy(self.data) + } + + +class UpdateOperation(AbstractOperation): + def __init__(self, project_name, entity_type, entity_id, update_fields): + super(CreateOperation, self).__init__(entity_type) + + self._entity_id = ObjectId(entity_id) + self._update_fields = update_fields + + @property + def entity_id(self): + return self._entity_id + + @property + def update_fields(self): + return self._update_fields + + def to_mongo_operation(self): + unset_data = {} + set_data = {} + for key, value in self._update_fields.items(): + if value is REMOVED_VALUE: + unset_data[key] = value + else: + set_data[key] = value + + op_data = {} + if unset_data: + op_data["$unset"] = unset_data + if set_data: + op_data["$set"] = set_data + + if not op_data: + return None + + return UpdateOne( + {"_id": self.entity_id}, + op_data + ) + + def to_data(self): + fields = {} + for key, value in self._update_fields.items(): + if value is REMOVED_VALUE: + value = None + fields[key] = value + + return { + "operation": "update", + "entity_type": self.entity_type, + "entity_id": str(self.entity_id), + "fields": fields + } + + +class 
DeleteOperation(AbstractOperation): + def __init__(self, entity_type, entity_id): + super(DeleteOperation, self).__init__(entity_type) + + self._entity_id = ObjectId(entity_id) + + @property + def entity_id(self): + return self._entity_id + + def to_mongo_operation(self): + return DeleteOne({"_id": self.entity_id}) + + def to_data(self): + return { + "operation": "delete", + "entity_type": self.entity_type, + "entity_id": str(self.entity_id) + } + + +class OperationsSession(object): + """Session storing operations that should happen in an order. + + At this moment it does not handle anything special and can be considered a + stupid list of operations that will happen one after another. If creation + of the same entity is added multiple times, it is not handled in any way and document + values are not validated. + + All operations must be related to a single project. + + Args: + project_name (str): Project name to which are operations related. + """ + + def __init__(self, project_name): + self._project_name = project_name + self._operations = [] + + @property + def project_name(self): + return self._project_name + + def add(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + if not isinstance( + operation, + (CreateOperation, UpdateOperation, DeleteOperation) + ): + raise TypeError("Expected Operation object, got {}".format( + str(type(operation)) + )) + + self._operations.append(operation) + + def append(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + + self.add(operation) + + def extend(self, operations): + """Add operations to be processed. + + Args: + operations (List[BaseOperation]): Operations that should be + processed.
+ """ + + for operation in operations: + self.add(operation) + + def remove(self, operation): + """Remove operation.""" + + self._operations.remove(operation) + + def clear(self): + """Clear all registered operations.""" + + self._operations = [] + + def to_data(self): + return { + "project_name": self.project_name, + "operations": [ + operation.to_data() + for operation in self._operations + ] + } + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(self.project_name) + collection.bulk_write(bulk_writes) From 7d23558ac038be8e951682f0f945b6c58a8717b0 Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 29 Jul 2022 18:27:52 +0200 Subject: [PATCH 623/785] Kitsu|Fix: Collect entities error cause of Python2 Fix #3552 --- .../modules/kitsu/plugins/publish/collect_kitsu_entities.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index d28ded06c7..d2a6f3f303 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -39,10 +39,10 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"]) if not kitsu_entity: - raise AssertionError(f"{entity_type} not found in kitsu!") + raise AssertionError("{} not found in kitsu!".format(entity_type)) context.data["kitsu_entity"] = kitsu_entity - self.log.debug(f"Collect kitsu {entity_type}: {kitsu_entity}") + self.log.debug("Collect kitsu {}: {}".format(entity_type, kitsu_entity)) if zou_task_data: kitsu_task = gazu.task.get_task(zou_task_data["id"]) From 49af7d9d2dad8c8deec3226cbb50c4a8ecd38694 Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 29 Jul 2022 18:47:32 +0200 Subject: [PATCH 624/785] black line length --- .../modules/kitsu/plugins/publish/collect_kitsu_entities.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index d2a6f3f303..c9e78b59eb 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -42,7 +42,9 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): raise AssertionError("{} not found in kitsu!".format(entity_type)) context.data["kitsu_entity"] = kitsu_entity - self.log.debug("Collect kitsu {}: {}".format(entity_type, kitsu_entity)) + self.log.debug( + "Collect kitsu {}: {}".format(entity_type, kitsu_entity) + ) if zou_task_data: kitsu_task = gazu.task.get_task(zou_task_data["id"]) From f9f53fe19c68302dc90362c668bb5bededf93e36 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:08:15 +0200 Subject: [PATCH 625/785] add missing method which was resolved as part of HiddenCreator --- openpype/hosts/traypublisher/api/plugin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index a0c42a55b1..a3eead51c8 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ 
b/openpype/hosts/traypublisher/api/plugin.py @@ -92,6 +92,21 @@ class TrayPublishCreator(Creator): for instance in instances: self._remove_instance_from_context(instance) + def _store_new_instance(self, new_instance): + """Tray publisher specific method to store instance. + + Instance is stored into "workfile" of traypublisher and also add it + to CreateContext. + + Args: + new_instance (CreatedInstance): Instance that should be stored. + """ + + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + class SettingsCreator(TrayPublishCreator): create_allow_context_change = True From c0de0d5b89654f38a95f779af7b7e450bf58a5ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:08:46 +0200 Subject: [PATCH 626/785] use '_store_new_instance' in editorial creators --- .../plugins/create/create_editorial.py | 38 ++++--------------- 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 3bc8f89556..7ca68f39e8 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -75,20 +75,13 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): self.log.info(f"instance_data: {instance_data}") subset_name = instance_data["subset"] - return self._create_instance(subset_name, instance_data) - - def _create_instance(self, subset_name, data): - # Create new instance - new_instance = CreatedInstance(self.family, subset_name, data, self) + new_instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) self.log.info(f"instance_data: {pformat(new_instance.data)}") - # Host implementation of storing metadata about instance - HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) - - return new_instance + self._store_new_instance(new_instance) def get_instance_attr_defs(self): return [ @@ -299,8 +292,10 @@ or updating already created. Publishing will create OTIO file. "editorialSourcePath": media_path, "otioTimeline": otio.adapters.write_to_string(otio_timeline) }) - - self._create_instance(self.family, subset_name, data) + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._store_new_instance(new_instance) def _create_otio_timeline(self, sequence_path, fps): """Creating otio timeline from sequence path @@ -820,23 +815,6 @@ or updating already created. Publishing will create OTIO file. "Please check names in the input sequence files." ) - def _create_instance(self, family, subset_name, instance_data): - """ CreatedInstance object creator - - Args: - family (str): family name - subset_name (str): subset name - instance_data (dict): instance data - """ - # Create new instance - new_instance = CreatedInstance( - family, subset_name, instance_data, self - ) - # Host implementation of storing metadata about instance - HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) - def get_pre_create_attr_defs(self): """ Creating pre-create attributes at creator plugin. 
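For reference, a minimal sketch of how a hidden Tray Publisher creator is expected to rely on the `_store_new_instance` helper added in PATCH 625 and adopted by the editorial creators in PATCH 626. The class name, identifier, family value, import path of HiddenTrayPublishCreator and the `create` signature are assumptions for illustration only; the CreatedInstance call and the helper usage mirror the diffs above.

from openpype.pipeline import CreatedInstance
# Import path assumed; HiddenTrayPublishCreator is defined next to
# TrayPublishCreator in the traypublisher api plugin module.
from openpype.hosts.traypublisher.api.plugin import HiddenTrayPublishCreator


class ExampleHiddenCreator(HiddenTrayPublishCreator):
    """Hypothetical hidden creator showing the '_store_new_instance' flow."""

    # Example values only - not one of the real creators.
    identifier = "io.openpype.creators.traypublisher.example"
    family = "exampleClip"

    def create(self, instance_data, source_data):
        # Subset name is expected to be prepared by the calling creator,
        # as in EditorialClipInstanceCreatorBase above.
        subset_name = instance_data["subset"]

        new_instance = CreatedInstance(
            self.family, subset_name, instance_data, self
        )
        # Stores instance metadata through HostContext and adds the instance
        # to the CreateContext - replacing the removed '_create_instance'
        # helpers from 'create_editorial.py'.
        self._store_new_instance(new_instance)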
From 441f2269d4a3db6a5b8cbb5023d386eb1fee143d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:10:13 +0200 Subject: [PATCH 627/785] removed unused import --- openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 7ca68f39e8..e9bca79b31 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -29,8 +29,6 @@ from openpype.lib import ( UILabelDef ) -from openpype.hosts.traypublisher.api.pipeline import HostContext - CLIP_ATTR_DEFS = [ EnumDef( From a11ef9f346b1b410ae99483dad3bb53cd187b084 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 20:20:20 +0300 Subject: [PATCH 628/785] Append frame reset feature, handle prefix key properly --- openpype/hosts/maya/api/lib_rendersettings.py | 18 +++++++++++++----- .../defaults/project_settings/maya.json | 3 ++- .../schemas/schema_maya_render_settings.json | 7 ++++++- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 8c09175614..7eae5bbbbf 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -66,12 +66,20 @@ class RenderSettings(object): )] except KeyError: aov_separator = "_" + reset_frame = self._project_settings["maya"]["RenderSettings"]["reset_current_frame"] # noqa - prefix = self._image_prefixes[renderer] - prefix = prefix.replace("{aov_separator}", aov_separator) - cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") + if reset_frame: + start_frame = cmds.getAttr("defaultRenderGlobals.startFrame") + cmds.currentTime(start_frame, edit=True) + + if renderer in self._image_prefix_nodes: + prefix = self._image_prefixes[renderer] + prefix = prefix.replace("{aov_separator}", aov_separator) + cmds.setAttr(self._image_prefix_nodes[renderer], + prefix, + type="string") + else: + print("{0} isn't a supported renderer to autoset settings.".format(renderer)) # TODO: handle not having res values in the doc width = asset_doc["data"].get("resolutionWidth") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 6e50f13418..5f11072b12 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -35,6 +35,7 @@ "apply_render_settings": true, "default_render_image_folder": "", "aov_separator": "underscore", + "reset_current_frame": false, "arnold_renderer": { "image_prefix": "maya///_", "image_format": "exr", @@ -973,4 +974,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 96b67dc66a..9b6b6f1eed 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -26,6 +26,11 @@ {"dot": ". 
(dot)"} ] }, + { + "key": "reset_current_frame", + "label": "Reset Current Frame", + "type": "boolean" + }, { "type": "dict", "collapsible": true, @@ -408,4 +413,4 @@ ] } ] -} \ No newline at end of file +} From 9967cd0d0aec49122b24ed7a6b388c832e845ca4 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 20:20:43 +0300 Subject: [PATCH 629/785] Append settings propagation to render instance creator. --- openpype/hosts/maya/plugins/create/create_render.py | 3 ++- vendor/configs/OpenColorIO-Configs | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) create mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index d4ad488b32..395984aee0 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -87,7 +87,8 @@ class CreateRender(plugin.Creator): return self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) - + if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: + lib_rendersettings.RenderSettings().set_default_renderer_settings() manager = ModulesManager() self.deadline_module = manager.modules_by_name["deadline"] try: diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From cc5abb15142a7c9d31d5602ba6434f9f534a670e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:20:47 +0200 Subject: [PATCH 630/785] few minor modifications and changes --- openpype/client/operations.py | 125 +++++++++++++++++++++------------- 1 file changed, 79 insertions(+), 46 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 365833b318..517a53c27f 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -15,9 +15,14 @@ REMOVED_VALUE = object() class AbstractOperation(object): """Base operation class.""" - def __init__(self, entity_type): + def __init__(self, project_name, entity_type): + self._project_name = project_name self._entity_type = entity_type - self._id = uuid.uuid4() + self._id = str(uuid.uuid4()) + + @property + def project_name(self): + return self._project_name @property def id(self): @@ -27,14 +32,28 @@ class AbstractOperation(object): def entity_type(self): return self._entity_type + @abstractproperty + def operation_name(self): + pass + @abstractmethod def to_mongo_operation(self): pass + def to_data(self): + return { + "id": self._id, + "entity_type": self.entity_type, + "project_name": self.project_name, + "operation": self.operation_name + } + class CreateOperation(AbstractOperation): + operation_name = "create" + def __init__(self, project_name, entity_type, data): - super(CreateOperation, self).__init__(entity_type) + super(CreateOperation, self).__init__(project_name, entity_type) if not data: data = {} @@ -73,32 +92,32 @@ class CreateOperation(AbstractOperation): return InsertOne(copy.deepcopy(self._data)) def to_data(self): - return { - "operation": "create", - "entity_type": self.entity_type, - "data": copy.deepcopy(self.data) - } + output = super(CreateOperation, self).to_data() + output["data"] = copy.deepcopy(self.data) + return output class UpdateOperation(AbstractOperation): - def __init__(self, project_name, entity_type, entity_id, update_fields): - 
super(CreateOperation, self).__init__(entity_type) + operation_name = "update" + + def __init__(self, project_name, entity_type, entity_id, update_data): + super(UpdateOperation, self).__init__(project_name, entity_type) self._entity_id = ObjectId(entity_id) - self._update_fields = update_fields + self._update_data = update_data @property def entity_id(self): return self._entity_id @property - def update_fields(self): - return self._update_fields + def update_data(self): + return self._update_data def to_mongo_operation(self): unset_data = {} set_data = {} - for key, value in self._update_fields.items(): + for key, value in self._update_data.items(): if value is REMOVED_VALUE: unset_data[key] = value else: @@ -120,22 +139,24 @@ class UpdateOperation(AbstractOperation): def to_data(self): fields = {} - for key, value in self._update_fields.items(): + for key, value in self._update_data.items(): if value is REMOVED_VALUE: value = None fields[key] = value - return { - "operation": "update", - "entity_type": self.entity_type, + output = super(UpdateOperation, self).to_data() + output.update({ "entity_id": str(self.entity_id), "fields": fields - } + }) + return output class DeleteOperation(AbstractOperation): - def __init__(self, entity_type, entity_id): - super(DeleteOperation, self).__init__(entity_type) + operation_name = "delete" + + def __init__(self, project_name, entity_type, entity_id): + super(DeleteOperation, self).__init__(project_name, entity_type) self._entity_id = ObjectId(entity_id) @@ -147,11 +168,9 @@ class DeleteOperation(AbstractOperation): return DeleteOne({"_id": self.entity_id}) def to_data(self): - return { - "operation": "delete", - "entity_type": self.entity_type, - "entity_id": str(self.entity_id) - } + output = super(DeleteOperation, self).to_data() + output["entity_id"] = self.entity_id + return output class OperationsSession(object): @@ -168,14 +187,9 @@ class OperationsSession(object): project_name (str): Project name to which are operations related. """ - def __init__(self, project_name): - self._project_name = project_name + def __init__(self): self._operations = [] - @property - def project_name(self): - return self._project_name - def add(self, operation): """Add operation to be processed. 
@@ -223,13 +237,10 @@ class OperationsSession(object): self._operations = [] def to_data(self): - return { - "project_name": self.project_name, - "operations": [ - operation.to_data() - for operation in self._operations - ] - } + return [ + operation.to_data() + for operation in self._operations + ] def commit(self): """Commit session operations.""" @@ -238,12 +249,34 @@ class OperationsSession(object): if not operations: return - bulk_writes = [] + operations_by_project = collections.defaultdict(list) for operation in operations: - mongo_op = operation.to_mongo_operation() - if mongo_op is not None: - bulk_writes.append(mongo_op) + operations_by_project[operation.project_name].append(operation) - if bulk_writes: - collection = get_project_connection(self.project_name) - collection.bulk_write(bulk_writes) + for project_name, operations in operations_by_project.items(): + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(project_name) + collection.bulk_write(bulk_writes) + + def create_entity(self, project_name, entity_type, data): + operation = CreateOperation(project_name, entity_type, data) + self.add(operation) + return operation + + def update_entity(self, project_name, entity_type, entity_id, update_data): + operation = UpdateOperation( + project_name, entity_type, entity_id, update_data + ) + self.add(operation) + return operation + + def delete_entity(self, project_name, entity_type, entity_id): + operation = DeleteOperation(project_name, entity_type, entity_id) + self.add(operation) + return operation From f39623d99138bee79021e87f476c7abca14e1bb2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:22:14 +0200 Subject: [PATCH 631/785] added helper functions to create new documents --- openpype/client/operations.py | 126 +++++++++++++++++++++++++++++++++- 1 file changed, 125 insertions(+), 1 deletion(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 517a53c27f..db3071abef 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,6 +1,8 @@ +import re import uuid import copy -from abc import ABCMeta, abstractmethod +import collections +from abc import ABCMeta, abstractmethod, abstractproperty import six from bson.objectid import ObjectId @@ -10,6 +12,128 @@ from .mongo import get_project_connection REMOVED_VALUE = object() +CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" +CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" +CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" +CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" +CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" + + +def _create_or_convert_to_mongo_id(mongo_id): + if mongo_id is None: + return ObjectId() + return ObjectId(mongo_id) + + +def new_project_document( + project_name, project_code, config, data=None, entity_id=None +): + if data is None: + data = {} + + data["code"] = project_code + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "name": project_name, + "type": CURRENT_PROJECT_SCHEMA, + "data": data, + "config": config + } + + +def new_asset_document( + name, project_id, parent_id, parents, data=None, entity_id=None +): + if data is None: + data = {} + if parent_id is not None: + parent_id = ObjectId(parent_id) + data["visualParent"] = parent_id + data["parents"] = parents + + return { + "_id": 
_create_or_convert_to_mongo_id(entity_id), + "type": "asset", + "name": name, + "parent": ObjectId(project_id), + "data": data, + "schema": CURRENT_ASSET_DOC_SCHEMA + } + + +def new_subset_document(name, family, asset_id, data=None, entity_id=None): + if data is None: + data = {} + data["family"] = family + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_SUBSET_SCHEMA, + "type": "subset", + "name": name, + "data": data, + "parent": asset_id + } + + +def new_version_doc(version, subset_id, data=None, entity_id=None): + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_VERSION_SCHEMA, + "type": "version", + "name": int(version), + "parent": subset_id, + "data": data + } + + +def new_representation_doc( + name, version_id, context, data=None, entity_id=None +): + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_REPRESENTATION_SCHEMA, + "type": "representation", + "parent": version_id, + "name": name, + "data": data, + + # Imprint shortcut to context for performance reasons. + "context": context + } + + +def _prepare_update_data(old_doc, new_doc, replace): + changes = {} + for key, value in new_doc.items(): + if key not in old_doc or value != old_doc[key]: + changes[key] = value + + if replace: + for key in old_doc.keys(): + if key not in new_doc: + changes[key] = REMOVED_VALUE + return changes + + +def prepare_subset_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_version_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_representation_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + @six.add_metaclass(ABCMeta) class AbstractOperation(object): From 8b482a0a1f88f7c9931b8ce4f5ad08096c7f896a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:22:54 +0200 Subject: [PATCH 632/785] update oprations in integrator --- openpype/plugins/publish/integrate.py | 176 ++++++++++++++------------ 1 file changed, 98 insertions(+), 78 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index d817595888..b7d48fe9cf 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,8 +5,16 @@ import copy import clique import six +from openpype.client.operations import ( + OperationsSession, + new_subset_document, + new_version_doc, + new_representation_doc, + prepare_subset_update_data, + prepare_version_update_data, + prepare_representation_update_data, +) from bson.objectid import ObjectId -from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api from openpype.client import ( @@ -282,9 +290,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) - subset, subset_writes = self.prepare_subset(instance, project_name) - version, version_writes = self.prepare_version( - instance, subset, project_name + op_session = OperationsSession() + subset = self.prepare_subset( + instance, op_session, project_name + ) + version = self.prepare_version( + instance, op_session, subset, project_name ) instance.data["versionEntity"] = version @@ -334,7 +345,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Transaction to reduce the chances of another publish trying to # publish to the same version 
number since that chance can greatly # increase if the file transaction takes a long time. - legacy_io.bulk_write(subset_writes + version_writes) + op_session.commit() + self.log.info("Subset {subset[name]} and Version {version[name]} " "written to database..".format(subset=subset, version=version)) @@ -366,49 +378,49 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources - representation_writes = [] new_repre_names_low = set() for prepared in prepared_representations: - representation = prepared["representation"] + repre_doc = prepared["representation"] + repre_update_data = prepared["repre_doc_update_data"] transfers = prepared["transfers"] destinations = [dst for src, dst in transfers] - representation["files"] = self.get_files_info( + repre_doc["files"] = self.get_files_info( destinations, sites=sites, anatomy=anatomy ) # Add the version resource file infos to each representation - representation["files"] += resource_file_infos + repre_doc["files"] += resource_file_infos # Set up representation for writing to the database. Since # we *might* be overwriting an existing entry if the version # already existed we'll use ReplaceOnce with `upsert=True` - representation_writes.append(ReplaceOne( - filter={"_id": representation["_id"]}, - replacement=representation, - upsert=True - )) + if repre_update_data is None: + op_session.create_entity( + project_name, repre_doc["type"], repre_doc + ) + else: + op_session.update_entity( + project_name, + repre_doc["type"], + repre_doc["_id"], + repre_update_data + ) - new_repre_names_low.add(representation["name"].lower()) + new_repre_names_low.add(repre_doc["name"].lower()) # Delete any existing representations that didn't get any new data # if the instance is not set to append mode if not instance.data.get("append", False): - delete_names = set() for name, existing_repres in existing_repres_by_name.items(): if name not in new_repre_names_low: # We add the exact representation name because `name` is # lowercase for name matching only and not in the database - delete_names.add(existing_repres["name"]) - if delete_names: - representation_writes.append(DeleteMany( - filter={ - "parent": version["_id"], - "name": {"$in": list(delete_names)} - } - )) + op_session.delete_entity( + project_name, "representation", existing_repres["_id"] + ) - # Write representations to the database - legacy_io.bulk_write(representation_writes) + self.log.debug("{}".format(op_session.to_data())) + op_session.commit() # Backwards compatibility # todo: can we avoid the need to store this? 
@@ -419,13 +431,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info("Registered {} representations" "".format(len(prepared_representations))) - def prepare_subset(self, instance, project_name): + def prepare_subset(self, instance, op_session, project_name): asset_doc = instance.data["assetEntity"] subset_name = instance.data["subset"] + family = instance.data["family"] self.log.debug("Subset: {}".format(subset_name)) # Get existing subset if it exists - subset_doc = get_subset_by_name( + existing_subset_doc = get_subset_by_name( project_name, subset_name, asset_doc["_id"] ) @@ -438,69 +451,79 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if subset_group: data["subsetGroup"] = subset_group - bulk_writes = [] - if subset_doc is None: + subset_id = None + if existing_subset_doc: + subset_id = existing_subset_doc["_id"] + subset_doc = new_subset_document( + subset_name, family, asset_doc["_id"], data, subset_id + ) + + if existing_subset_doc is None: # Create a new subset self.log.info("Subset '%s' not found, creating ..." % subset_name) - subset_doc = { - "_id": ObjectId(), - "schema": "openpype:subset-3.0", - "type": "subset", - "name": subset_name, - "data": data, - "parent": asset_doc["_id"] - } - bulk_writes.append(InsertOne(subset_doc)) + op_session.create_entity( + project_name, subset_doc["type"], subset_doc + ) else: # Update existing subset data with new data and set in database. # We also change the found subset in-place so we don't need to # re-query the subset afterwards subset_doc["data"].update(data) - bulk_writes.append(UpdateOne( - {"type": "subset", "_id": subset_doc["_id"]}, - {"$set": { - "data": subset_doc["data"] - }} - )) + update_data = prepare_subset_update_data( + existing_subset_doc, subset_doc + ) + op_session.update_entity( + project_name, + subset_doc["type"], + subset_doc["_id"], + update_data + ) self.log.info("Prepared subset: {}".format(subset_name)) - return subset_doc, bulk_writes + return subset_doc - def prepare_version(self, instance, subset_doc, project_name): + def prepare_version(self, instance, op_session, subset_doc, project_name): version_number = instance.data["version"] - version_doc = { - "schema": "openpype:version-3.0", - "type": "version", - "parent": subset_doc["_id"], - "name": version_number, - "data": self.create_version_data(instance) - } - existing_version = get_version_by_name( project_name, version_number, subset_doc["_id"], fields=["_id"] ) + version_id = None + if existing_version: + version_id = existing_version["_id"] + + version_data = self.create_version_data(instance) + version_doc = new_version_doc( + version_number, + subset_doc["_id"], + version_data, + version_id + ) if existing_version: self.log.debug("Updating existing version ...") - version_doc["_id"] = existing_version["_id"] + update_data = prepare_version_update_data( + existing_version, version_doc + ) + op_session.update_entity( + project_name, + version_doc["type"], + version_doc["_id"], + update_data + ) else: self.log.debug("Creating new version ...") - version_doc["_id"] = ObjectId() - - bulk_writes = [ReplaceOne( - filter={"_id": version_doc["_id"]}, - replacement=version_doc, - upsert=True - )] + op_session.create_entity( + project_name, version_doc["type"], version_doc + ) self.log.info("Prepared version: v{0:03d}".format(version_doc["name"])) - return version_doc, bulk_writes + return version_doc def prepare_representation(self, repre, template_name, @@ -696,10 +719,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Use 
previous representation's id if there is a name match existing = existing_repres_by_name.get(repre["name"].lower()) + repre_id = None if existing: repre_id = existing["_id"] - else: - repre_id = ObjectId() # Store first transferred destination as published path data # - used primarily for reviews that are integrated to custom modules @@ -713,20 +735,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # and the actual representation entity for the database data = repre.get("data", {}) data.update({"path": published_path, "template": template}) - representation = { - "_id": repre_id, - "schema": "openpype:representation-2.0", - "type": "representation", - "parent": version["_id"], - "name": repre["name"], - "data": data, - - # Imprint shortcut to context for performance reasons. - "context": repre_context - } + repre_doc = new_representation_doc( + repre["name"], version["_id"], repre_context, data, repre_id + ) + update_data = None + if repre_id is not None: + update_data = prepare_representation_update_data( + existing, repre_doc + ) return { - "representation": representation, + "representation": repre_doc, + "repre_doc_update_data": update_data, "anatomy_data": template_data, "transfers": transfers, # todo: avoid the need for 'published_files' used by Integrate Hero From 1f126977fa52d55c9874ae87f3f2b7494ae8eeb2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 21:07:29 +0300 Subject: [PATCH 633/785] Style fixes. --- openpype/hosts/maya/api/lib_rendersettings.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 7eae5bbbbf..6154e1ab89 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -76,10 +76,9 @@ class RenderSettings(object): prefix = self._image_prefixes[renderer] prefix = prefix.replace("{aov_separator}", aov_separator) cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") + prefix, type="string") # noqa else: - print("{0} isn't a supported renderer to autoset settings.".format(renderer)) + print("{0} isn't a supported renderer to autoset settings.".format(renderer)) # noqa # TODO: handle not having res values in the doc width = asset_doc["data"].get("resolutionWidth") From 487830fbbbedb783375fd9c9eee58e4c4cfb2841 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 29 Jul 2022 21:08:11 +0300 Subject: [PATCH 634/785] Style fix --- openpype/hosts/maya/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 395984aee0..fbe670b1ea 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -87,7 +87,7 @@ class CreateRender(plugin.Creator): return self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) - if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: + if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa lib_rendersettings.RenderSettings().set_default_renderer_settings() manager = ModulesManager() self.deadline_module = manager.modules_by_name["deadline"] From 3426bd74d89c7dfb71de0b2adf1fc06078fc763c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 30 Jul 2022 04:04:29 +0000 Subject: [PATCH 635/785] [Automated] Bump version --- CHANGELOG.md | 48 
++++++++++++++++++++++++++------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 30 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e4fc1d59ca..eab4e5e45e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,34 @@ # Changelog +## [3.12.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) + +**🆕 New features** + +- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) + +**🚀 Enhancements** + +- Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) +- General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) + +**🐛 Bug fixes** + +- Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) +- Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) + +**🔀 Refactored code** + +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) +- General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) + ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) ### 📖 Documentation @@ -38,9 +64,9 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) +- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) **🔀 Refactored code** @@ -61,10 +87,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) -### 📖 Documentation - -- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) - **🚀 Enhancements** - TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) @@ -73,7 +95,6 @@ - General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) - General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) - Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) -- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) **🐛 Bug fixes** @@ -86,25 +107,12 @@ - Kitsu: bugfix with sync-service ans 
publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) - Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) - General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) -- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) - General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) -- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) -- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) -- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) -- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) **🔀 Refactored code** - Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) - Maya: Re-use `maintained\_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) -- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) -- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) -- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) -- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) -- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) -- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) -- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) -- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) diff --git a/openpype/version.py b/openpype/version.py index 5c39e9e630..03fd5fb96e 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2" +__version__ = "3.12.3-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 175e72be24..118355395a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2" # OpenPype +version = "3.12.3-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 23866fee29fd3eded8a9c6c5e82442f20ca5a596 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 12:43:27 +0200 Subject: [PATCH 636/785] added some docstrings --- openpype/client/operations.py | 180 +++++++++++++++++++++++++++++++++- 1 file changed, 179 insertions(+), 1 deletion(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index db3071abef..908566fca6 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -29,6 +29,24 @@ def _create_or_convert_to_mongo_id(mongo_id): def new_project_document( project_name, project_code, config, data=None, entity_id=None ): + """Create skeleton data of project document. + + Args: + project_name (str): Name of project. Used as identifier of a project. 
+        project_code (str): Shorter version of project name without spaces and
+            special characters (in most cases). Should also be considered
+            as a unique name across projects.
+        config (Dict[str, Any]): Project config consisting of roots, templates,
+            applications and other project Anatomy related data.
+        data (Dict[str, Any]): Project data with information about its
+            attributes (e.g. 'fps' etc.) or integration specific keys.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of project document.
+    """
+
     if data is None:
         data = {}
 
@@ -46,6 +64,22 @@
 def new_asset_document(
     name, project_id, parent_id, parents, data=None, entity_id=None
 ):
+    """Create skeleton data of asset document.
+
+    Args:
+        name (str): Is considered as unique identifier of asset in project.
+        project_id (Union[str, ObjectId]): Id of project document.
+        parent_id (Union[str, ObjectId]): Id of parent asset.
+        parents (List[str]): List of parent asset names.
+        data (Dict[str, Any]): Asset document data. Empty dictionary is used
+            if not passed. Value of 'parent_id' is used to fill 'visualParent'.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of asset document.
+    """
+
     if data is None:
         data = {}
     if parent_id is not None:
@@ -64,6 +98,21 @@
 def new_subset_document(name, family, asset_id, data=None, entity_id=None):
+    """Create skeleton data of subset document.
+
+    Args:
+        name (str): Is considered as unique identifier of subset under asset.
+        family (str): Subset's family.
+        asset_id (Union[str, ObjectId]): Id of parent asset.
+        data (Dict[str, Any]): Subset document data. Empty dictionary is used
+            if not passed. Value of 'family' is used to fill 'family' key.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of subset document.
+    """
+
     if data is None:
         data = {}
     data["family"] = family
@@ -78,6 +127,20 @@ def new_subset_document(name, family, asset_id, data=None, entity_id=None):
 
 
 def new_version_doc(version, subset_id, data=None, entity_id=None):
+    """Create skeleton data of version document.
+
+    Args:
+        version (int): Is considered as unique identifier of version
+            under subset.
+        subset_id (Union[str, ObjectId]): Id of parent subset.
+        data (Dict[str, Any]): Version document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of version document.
+    """
+
     if data is None:
         data = {}
 
@@ -94,6 +157,22 @@ def new_version_doc(version, subset_id, data=None, entity_id=None):
 def new_representation_doc(
     name, version_id, context, data=None, entity_id=None
 ):
+    """Create skeleton data of representation document.
+
+    Args:
+        name (str): Is considered as unique identifier of representation
+            under version.
+        version_id (Union[str, ObjectId]): Id of parent version.
+        context (Dict[str, Any]): Representation context used to fill template
+            or to query the representation.
+        data (Dict[str, Any]): Representation document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of representation document.
+    """
+
     if data is None:
         data = {}
 
@@ -124,20 +203,59 @@ def _prepare_update_data(old_doc, new_doc, replace):
 
 
 def prepare_subset_update_data(old_doc, new_doc, replace=True):
+    """Compare two subset documents and prepare update data.
+
+    Based on the compared values, creates update data for 'UpdateOperation'.
+
+    Empty output means that documents are identical.
+
+    Returns:
+        Dict[str, Any]: Changes between old and new document.
+    """
+
     return _prepare_update_data(old_doc, new_doc, replace)
 
 
 def prepare_version_update_data(old_doc, new_doc, replace=True):
+    """Compare two version documents and prepare update data.
+
+    Based on the compared values, creates update data for 'UpdateOperation'.
+
+    Empty output means that documents are identical.
+
+    Returns:
+        Dict[str, Any]: Changes between old and new document.
+    """
+
     return _prepare_update_data(old_doc, new_doc, replace)
 
 
 def prepare_representation_update_data(old_doc, new_doc, replace=True):
+    """Compare two representation documents and prepare update data.
+
+    Based on the compared values, creates update data for 'UpdateOperation'.
+
+    Empty output means that documents are identical.
+
+    Returns:
+        Dict[str, Any]: Changes between old and new document.
+    """
+
     return _prepare_update_data(old_doc, new_doc, replace)
 
 
 @six.add_metaclass(ABCMeta)
 class AbstractOperation(object):
-    """Base operation class."""
+    """Base operation class.
+
+    Operation represents a call to the database. The call can create, change
+    or remove data.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+    """
 
     def __init__(self, project_name, entity_type):
         self._project_name = project_name
@@ -150,6 +268,8 @@ class AbstractOperation(object):
 
     @property
     def id(self):
+        """Identifier of operation."""
+
         return self._id
 
     @property
@@ -158,13 +278,23 @@ class AbstractOperation(object):
 
     @abstractproperty
     def operation_name(self):
+        """Stringified type of operation."""
+
         pass
 
     @abstractmethod
    def to_mongo_operation(self):
+        """Convert operation to Mongo batch operation."""
+
         pass
 
     def to_data(self):
+        """Convert operation to data that can be converted to json or others.
+
+        Returns:
+            Dict[str, Any]: Description of operation.
+        """
+
         return {
             "id": self._id,
             "entity_type": self.entity_type,
@@ -174,6 +304,15 @@ class AbstractOperation(object):
 
 
 class CreateOperation(AbstractOperation):
+    """Operation to create an entity.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        data (Dict[str, Any]): Data of entity that will be created.
+    """
+
     operation_name = "create"
 
     def __init__(self, project_name, entity_type, data):
@@ -222,6 +361,18 @@ class CreateOperation(AbstractOperation):
 
 
 class UpdateOperation(AbstractOperation):
+    """Operation to update an entity.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        entity_id (Union[str, ObjectId]): Identifier of an entity.
+        update_data (Dict[str, Any]): Key -> value changes that will be set in
+            database. If value is set to 'REMOVED_VALUE' the key will be
+            removed. Only first level of dictionary is checked (on purpose).
+    """
+
     operation_name = "update"
 
     def __init__(self, project_name, entity_type, entity_id, update_data):
@@ -277,6 +428,15 @@ class UpdateOperation(AbstractOperation):
 
 
 class DeleteOperation(AbstractOperation):
+    """Operation to delete an entity.
+
+    Args:
+        project_name (str): On which project operation will happen.
+        entity_type (str): Type of entity on which change happens.
+            e.g. 'asset', 'representation' etc.
+        entity_id (Union[str, ObjectId]): Entity id that will be removed.
+    """
+
     operation_name = "delete"
 
     def __init__(self, project_name, entity_type, entity_id):
@@ -389,11 +549,23 @@ class OperationsSession(object):
         collection.bulk_write(bulk_writes)
 
     def create_entity(self, project_name, entity_type, data):
+        """Fast access to 'CreateOperation'.
+
+        Returns:
+            CreateOperation: Object of create operation.
+        """
+
         operation = CreateOperation(project_name, entity_type, data)
         self.add(operation)
         return operation
 
     def update_entity(self, project_name, entity_type, entity_id, update_data):
+        """Fast access to 'UpdateOperation'.
+
+        Returns:
+            UpdateOperation: Object of update operation.
+        """
+
         operation = UpdateOperation(
             project_name, entity_type, entity_id, update_data
         )
@@ -401,6 +573,12 @@ class OperationsSession(object):
         return operation
 
     def delete_entity(self, project_name, entity_type, entity_id):
+        """Fast access to 'DeleteOperation'.
+
+        Returns:
+            DeleteOperation: Object of delete operation.
+        """
+
         operation = DeleteOperation(project_name, entity_type, entity_id)
         self.add(operation)
         return operation

From 7de3d76075937309b4e07da3c7383650ebdd5c0a Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 1 Aug 2022 12:46:44 +0200
Subject: [PATCH 637/785] removed unused import

---
 openpype/client/operations.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/openpype/client/operations.py b/openpype/client/operations.py
index 908566fca6..dfb1d8c4dd 100644
--- a/openpype/client/operations.py
+++ b/openpype/client/operations.py
@@ -1,4 +1,3 @@
-import re
 import uuid
 import copy
 import collections

From a29766385b07ef09837d611b4583583177a57da4 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 1 Aug 2022 12:52:22 +0200
Subject: [PATCH 638/785] return created instance

---
 openpype/hosts/traypublisher/plugins/create/create_editorial.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py
index e9bca79b31..28a115629e 100644
--- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py
+++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py
@@ -81,6 +81,8 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator):
 
         self._store_new_instance(new_instance)
 
+        return new_instance
+
     def get_instance_attr_defs(self):
         return [
             BoolDef(

From 5f5aba7ae3a37ee27db59f4b651f7f85d1316a51 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 1 Aug 2022 13:38:00 +0200
Subject: [PATCH 639/785] loader plugins can handle settings on their own

---
 openpype/pipeline/load/plugins.py | 49 +++++++++++++++++++++++++++++--
 1 file changed, 47 insertions(+), 2 deletions(-)

diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py
index a30a2188a4..233aace035 100644
--- a/openpype/pipeline/load/plugins.py
+++ b/openpype/pipeline/load/plugins.py
@@ -1,6 +1,7 @@
+import os
 import logging
 
-from openpype.lib import set_plugin_attributes_from_settings
+from openpype.settings import get_system_settings, get_project_settings
 from
openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -37,6 +38,46 @@ class LoaderPlugin(list): def __init__(self, context): self.fname = self.filepath_from_context(context) + @classmethod + def apply_settings(cls, project_settings, system_settings): + host_name = os.environ.get("AVALON_APP") + plugin_type = "load" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + @classmethod def get_representations(cls): return cls.representations @@ -112,7 +153,11 @@ class SubsetLoaderPlugin(LoaderPlugin): def discover_loader_plugins(): plugins = discover(LoaderPlugin) - set_plugin_attributes_from_settings(plugins, LoaderPlugin) + project_name = os.environ.get("AVALON_PROJECT") + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + plugin.apply_settings(project_settings, system_settings) return plugins From b2d5146288a6b4c9ca9e0c3fc0adf339a902ec35 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 13:38:18 +0200 Subject: [PATCH 640/785] LegacyCreator plugin can handle settings on their own --- openpype/pipeline/create/creator_plugins.py | 13 ++++--- openpype/pipeline/create/legacy_create.py | 43 +++++++++++++++++++++ 2 files changed, 51 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 8cb161de20..4a1630d8ef 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -1,3 +1,4 @@ +import os import copy from abc import ( @@ -7,10 +8,8 @@ from abc import ( ) import six -from openpype.lib import ( - get_subset_name_with_asset_doc, - set_plugin_attributes_from_settings, -) +from openpype.settings import get_system_settings, get_project_settings +from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -439,7 +438,11 @@ def discover_creator_plugins(): def discover_legacy_creator_plugins(): plugins = discover(LegacyCreator) - set_plugin_attributes_from_settings(plugins, LegacyCreator) + project_name = os.environ.get("AVALON_PROJECT") + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + plugin.apply_settings(project_settings, system_settings) return plugins diff --git a/openpype/pipeline/create/legacy_create.py b/openpype/pipeline/create/legacy_create.py index 46e0e3d663..2764b3cb95 100644 --- a/openpype/pipeline/create/legacy_create.py +++ b/openpype/pipeline/create/legacy_create.py @@ -5,6 +5,7 @@ Renamed classes and 
functions - 'create' -> 'legacy_create' """ +import os import logging import collections @@ -37,6 +38,48 @@ class LegacyCreator(object): self.data.update(data or {}) + @classmethod + def apply_settings(cls, project_settings, system_settings): + """Apply OpenPype settings to a plugin class.""" + + host_name = os.environ.get("AVALON_APP") + plugin_type = "create" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + def process(self): pass From acb4b28b975c8e276602a32237de7ce37773342b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 14:14:33 +0200 Subject: [PATCH 641/785] moved filter pyblish plugins function to openpype.pipeline.publish.lib --- openpype/pipeline/context_tools.py | 2 +- openpype/pipeline/publish/lib.py | 93 ++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 0535ce5d54..c8c70e5ea8 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -18,8 +18,8 @@ from openpype.client import ( ) from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings -from openpype.lib import filter_pyblish_plugins +from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names from . import ( diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 739b2c8806..d5494cd8a4 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -6,6 +6,10 @@ import xml.etree.ElementTree import six import pyblish.plugin +import pyblish.api + +from openpype.lib import Logger +from openpype.settings import get_project_settings, get_system_settings class DiscoverResult: @@ -180,3 +184,92 @@ def publish_plugins_discover(paths=None): result.plugins = plugins return result + + +def filter_pyblish_plugins(plugins): + """Pyblish plugin filter which applies OpenPype settings. + + Apply OpenPype settings on discovered plugins. On plugin with implemented + class method 'def apply_settings(cls, project_settings, system_settings)' + is called the method. Default behavior looks for plugin name and current + host name to look for + + Args: + plugins (List[pyblish.plugin.Plugin]): Discovered plugins on which + are applied settings. + """ + + log = Logger.get_logger("filter_pyblish_plugins") + + # TODO: Don't use host from 'pyblish.api' but from defined host by us. 
+ # - kept becau on farm is probably used host 'shell' which propably + # affect how settings are applied there + host = pyblish.api.current_host() + project_name = os.environ.get("AVALON_PROJECT") + + project_setting = get_project_settings(project_name) + system_settings = get_system_settings() + + # iterate over plugins + for plugin in plugins[:]: + if hasattr(plugin, "apply_settings"): + try: + # Use classmethod 'apply_settings' + # - can be used to target settings from custom settings place + # - skip default behavior when successful + plugin.apply_settings(project_setting, system_settings) + continue + + except Exception: + log.warning( + ( + "Failed to apply settings on plugin {}" + ).format(plugin.__name__), + exc_info=True + ) + + try: + config_data = ( + project_setting + [host] + ["publish"] + [plugin.__name__] + ) + except KeyError: + # host determined from path + file = os.path.normpath(inspect.getsourcefile(plugin)) + file = os.path.normpath(file) + + split_path = file.split(os.path.sep) + if len(split_path) < 4: + log.warning( + 'plugin path too short to extract host {}'.format(file) + ) + continue + + host_from_file = split_path[-4] + plugin_kind = split_path[-2] + + # TODO: change after all plugins are moved one level up + if host_from_file == "openpype": + host_from_file = "global" + + try: + config_data = ( + project_setting + [host_from_file] + [plugin_kind] + [plugin.__name__] + ) + except KeyError: + continue + + for option, value in config_data.items(): + if option == "enabled" and value is False: + log.info('removing plugin {}'.format(plugin.__name__)) + plugins.remove(plugin) + else: + log.info('setting {}:{} on plugin {}'.format( + option, value, plugin.__name__)) + + setattr(plugin, option, value) From cf42e8fa44bb61fe1d6f80f8e122b52fb8cc022b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 14:15:26 +0200 Subject: [PATCH 642/785] mark functions in openpype.lib as deprecated --- openpype/lib/plugin_tools.py | 101 +++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 47 deletions(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 1d3c1eec6b..c94d1251fc 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- """Avalon/Pyblish plugin tools.""" import os -import inspect import logging import re import json +import warnings +import functools + from openpype.client import get_asset_by_id from openpype.settings import get_project_settings @@ -17,6 +19,51 @@ log = logging.getLogger(__name__) DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" +class PluginToolsDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", PluginToolsDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=PluginToolsDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper- + + if func is None: + return _decorator + return _decorator(func) + + class TaskNotSetError(KeyError): def __init__(self, msg=None): if not msg: @@ -197,6 +244,7 @@ def prepare_template_data(fill_pairs): return fill_data +@deprecated("openpype.pipeline.publish.lib.filter_pyblish_plugins") def filter_pyblish_plugins(plugins): """Filter pyblish plugins by presets. @@ -206,57 +254,14 @@ def filter_pyblish_plugins(plugins): Args: plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base` `discover()` method. - """ - from pyblish import api - host = api.current_host() + from openpype.pipeline.publish.lib import filter_pyblish_plugins - presets = get_project_settings(os.environ['AVALON_PROJECT']) or {} - # skip if there are no presets to process - if not presets: - return - - # iterate over plugins - for plugin in plugins[:]: - - try: - config_data = presets[host]["publish"][plugin.__name__] - except KeyError: - # host determined from path - file = os.path.normpath(inspect.getsourcefile(plugin)) - file = os.path.normpath(file) - - split_path = file.split(os.path.sep) - if len(split_path) < 4: - log.warning( - 'plugin path too short to extract host {}'.format(file) - ) - continue - - host_from_file = split_path[-4] - plugin_kind = split_path[-2] - - # TODO: change after all plugins are moved one level up - if host_from_file == "openpype": - host_from_file = "global" - - try: - config_data = presets[host_from_file][plugin_kind][plugin.__name__] # noqa: E501 - except KeyError: - continue - - for option, value in config_data.items(): - if option == "enabled" and value is False: - log.info('removing plugin {}'.format(plugin.__name__)) - plugins.remove(plugin) - else: - log.info('setting {}:{} on plugin {}'.format( - option, value, plugin.__name__)) - - setattr(plugin, option, value) + filter_pyblish_plugins(plugins) +@deprecated def set_plugin_attributes_from_settings( plugins, superclass, host_name=None, project_name=None ): @@ -273,6 +278,8 @@ def set_plugin_attributes_from_settings( project_name (str): Name of project for which settings will be loaded. Value from environment `AVALON_PROJECT` is used if not entered. 
""" + + # Function is not used anymore from openpype.pipeline import LegacyCreator, LoaderPlugin # determine host application to use for finding presets From 498ee1d85066ca40659b73196f58886682b1e186 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 1 Aug 2022 15:15:50 +0300 Subject: [PATCH 643/785] Fix schema to store as lists --- .../projects_schema/schemas/schema_maya_render_settings.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 9b6b6f1eed..af197604f8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -275,6 +275,7 @@ }, { "type": "dict-modifiable", + "store_as_list": true, "key": "additional_options", "label": "Additional Renderer Options", "use_label_wrap": true, @@ -403,6 +404,7 @@ }, { "type": "dict-modifiable", + "store_as_list": true, "key": "additional_options", "label": "Additional Renderer Options", "use_label_wrap": true, From 84a6c144c72928d252e04d3c378eef2926e3fdfa Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 1 Aug 2022 15:16:50 +0300 Subject: [PATCH 644/785] Handle additional attributes --- openpype/hosts/maya/api/lib_rendersettings.py | 30 +++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 6154e1ab89..9aea55a03b 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -139,6 +139,7 @@ class RenderSettings(object): # allow fullstops in custom attributes. Then checks for # type of MtoA attribute passed to adjust the `setAttr` # command accordingly. 
+ self._additional_attribs_setter(additional_options) for item in additional_options: attribute, value = item if (cmds.getAttr(str(attribute), type=True)) == "long": @@ -157,18 +158,28 @@ class RenderSettings(object): ["RenderSettings"] ["redshift_renderer"] ) - img_ext = redshift_render_presets.get("image_format") + additional_options = redshift_render_presets["additional_options"] + ext = redshift_render_presets["image_format"] + img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] + img_ext = img_exts.index(ext) + self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) + self._additional_attribs_setter(additional_options) def _set_vray_settings(self, aov_separator, width, height): # type: (str, int, int) -> None """Sets important settings for Vray.""" settings = cmds.ls(type="VRaySettingsNode") node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - + vray_render_presets = ( + self._project_settings + ["maya"] + ["RenderSettings"] + ["vray_renderer"] + ) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings @@ -207,6 +218,10 @@ class RenderSettings(object): cmds.setAttr("{}.width".format(node), width) cmds.setAttr("{}.height".format(node), height) + additional_options = vray_render_presets["additional_options"] + + self._additional_attribs_setter(additional_options) + @staticmethod def _set_global_output_settings(): # enable animation @@ -214,3 +229,14 @@ class RenderSettings(object): cmds.setAttr("defaultRenderGlobals.animation", 1) cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) + + def _additional_attribs_setter(self, additional_attribs): + print(additional_attribs) + for item in additional_attribs: + attribute, value = item + if (cmds.getAttr(str(attribute), type=True)) == "long": + cmds.setAttr(str(attribute), int(value)) + elif (cmds.getAttr(str(attribute), type=True)) == "bool": + cmds.setAttr(str(attribute), int(value)) # noqa + elif (cmds.getAttr(str(attribute), type=True)) == "string": + cmds.setAttr(str(attribute), str(value), type = "string") # noqa From bb10fdd041c499f30e5ffa7dd4069828b9f42239 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:00:14 +0200 Subject: [PATCH 645/785] :rotating_light: f-strings and cosmetic issues --- igniter/bootstrap_repos.py | 10 +++---- start.py | 55 +++++++++++++++++--------------------- tools/create_zip.py | 2 +- 3 files changed, 29 insertions(+), 38 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 08333885c0..8888440f90 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -122,7 +122,7 @@ class OpenPypeVersion(semver.VersionInfo): if self.staging: if kwargs.get("build"): if "staging" not in kwargs.get("build"): - kwargs["build"] = "{}-staging".format(kwargs.get("build")) + kwargs["build"] = f"{kwargs.get('build')}-staging" else: kwargs["build"] = "staging" @@ -136,8 +136,7 @@ class OpenPypeVersion(semver.VersionInfo): return bool(result and self.staging == other.staging) def __repr__(self): - return "<{}: {} - path={}>".format( - self.__class__.__name__, str(self), self.path) + return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>" def __lt__(self, other: OpenPypeVersion): result = super().__lt__(other) @@ -232,10 
+231,7 @@ class OpenPypeVersion(semver.VersionInfo): return openpype_version def __hash__(self): - if self.path: - return hash(self.path) - else: - return hash(str(self)) + return hash(self.path) if self.path else hash(str(self)) @staticmethod def is_version_in_dir( diff --git a/start.py b/start.py index cbf8ffd178..37cc4c063d 100644 --- a/start.py +++ b/start.py @@ -187,9 +187,8 @@ else: if "--headless" in sys.argv: os.environ["OPENPYPE_HEADLESS_MODE"] = "1" sys.argv.remove("--headless") -else: - if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": - os.environ.pop("OPENPYPE_HEADLESS_MODE", None) +elif os.getenv("OPENPYPE_HEADLESS_MODE") != "1": + os.environ.pop("OPENPYPE_HEADLESS_MODE", None) # Enabled logging debug mode when "--debug" is passed if "--verbose" in sys.argv: @@ -203,8 +202,8 @@ if "--verbose" in sys.argv: value = sys.argv.pop(idx) else: raise RuntimeError(( - "Expect value after \"--verbose\" argument. {}" - ).format(expected_values)) + f"Expect value after \"--verbose\" argument. {expected_values}" + )) log_level = None low_value = value.lower() @@ -225,8 +224,9 @@ if "--verbose" in sys.argv: if log_level is None: raise RuntimeError(( - "Unexpected value after \"--verbose\" argument \"{}\". {}" - ).format(value, expected_values)) + "Unexpected value after \"--verbose\" " + f"argument \"{value}\". {expected_values}" + )) os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) @@ -336,34 +336,33 @@ def run_disk_mapping_commands(settings): destination = destination.rstrip('/') source = source.rstrip('/') - if low_platform == "windows": - args = ["subst", destination, source] - elif low_platform == "darwin": - scr = "do shell script \"ln -s {} {}\" with administrator privileges".format(source, destination) # noqa: E501 + if low_platform == "darwin": + scr = f'do shell script "ln -s {source} {destination}" with administrator privileges' # noqa + args = ["osascript", "-e", scr] + elif low_platform == "windows": + args = ["subst", destination, source] else: args = ["sudo", "ln", "-s", source, destination] - _print("disk mapping args:: {}".format(args)) + _print(f"*** disk mapping arguments: {args}") try: if not os.path.exists(destination): output = subprocess.Popen(args) if output.returncode and output.returncode != 0: - exc_msg = "Executing was not successful: \"{}\"".format( - args) + exc_msg = f'Executing was not successful: "{args}"' raise RuntimeError(exc_msg) except TypeError as exc: - _print("Error {} in mapping drive {}, {}".format(str(exc), - source, - destination)) + _print( + f"Error {str(exc)} in mapping drive {source}, {destination}") raise def set_avalon_environments(): """Set avalon specific environments. - These are non modifiable environments for avalon workflow that must be set + These are non-modifiable environments for avalon workflow that must be set before avalon module is imported because avalon works with globals set with environment variables. 
""" @@ -508,7 +507,7 @@ def _process_arguments() -> tuple: ) if m and m.group('version'): use_version = m.group('version') - _print(">>> Requested version [ {} ]".format(use_version)) + _print(f">>> Requested version [ {use_version} ]") if "+staging" in use_version: use_staging = True break @@ -614,8 +613,8 @@ def _determine_mongodb() -> str: try: openpype_mongo = bootstrap.secure_registry.get_item( "openPypeMongo") - except ValueError: - raise RuntimeError("Missing MongoDB url") + except ValueError as e: + raise RuntimeError("Missing MongoDB url") from e return openpype_mongo @@ -816,11 +815,8 @@ def _bootstrap_from_code(use_version, use_staging): use_version, use_staging ) if version_to_use is None: - raise OpenPypeVersionNotFound( - "Requested version \"{}\" was not found.".format( - use_version - ) - ) + raise OpenPypeVersionIncompatible( + f"Requested version \"{use_version}\" was not found.") else: # Staging version should be used version_to_use = bootstrap.find_latest_openpype_version( @@ -906,7 +902,7 @@ def _boot_validate_versions(use_version, local_version): use_version, openpype_versions ) valid, message = bootstrap.validate_openpype_version(version_path) - _print("{}{}".format(">>> " if valid else "!!! ", message)) + _print(f'{">>> " if valid else "!!! "}{message}') def _boot_print_versions(use_staging, local_version, openpype_root): @@ -1043,7 +1039,7 @@ def boot(): if not result[0]: _print(f"!!! Invalid version: {result[1]}") sys.exit(1) - _print(f"--- version is valid") + _print("--- version is valid") else: try: version_path = _bootstrap_from_code(use_version, use_staging) @@ -1164,8 +1160,7 @@ def get_info(use_staging=None) -> list: formatted = [] for info in inf: padding = (maximum - len(info[0])) + 1 - formatted.append( - "... {}:{}[ {} ]".format(info[0], " " * padding, info[1])) + formatted.append(f'... 
{info[0]}:{" " * padding}[ {info[1]} ]') return formatted diff --git a/tools/create_zip.py b/tools/create_zip.py index 2fc351469a..6392428f58 100644 --- a/tools/create_zip.py +++ b/tools/create_zip.py @@ -61,7 +61,7 @@ def _print(msg: str, message_type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") if __name__ == "__main__": From a9f910ac227fd0f90a589ba9035d232c0c62e6ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:01:03 +0200 Subject: [PATCH 646/785] :recycle: add openpype version env var to deadline job --- .../deadline/plugins/publish/submit_aftereffects_deadline.py | 3 ++- .../deadline/plugins/publish/submit_harmony_deadline.py | 3 ++- .../deadline/plugins/publish/submit_houdini_remote_publish.py | 1 + .../deadline/plugins/publish/submit_houdini_render_deadline.py | 1 + .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- .../plugins/publish/submit_maya_remote_publish_deadline.py | 3 ++- .../modules/deadline/plugins/publish/submit_nuke_deadline.py | 3 ++- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 ++- 8 files changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index de8df3dd9e..c55f85c8da 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,7 +80,8 @@ class AfterEffectsSubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index a1ee5e0957..3f9c09b592 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -274,7 +274,8 @@ class HarmonySubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index fdf67b51bc..95856137e2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -130,6 +130,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. 
"houdini17.5;pluginx2.3" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index aca88c7440..beda753723 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -101,6 +101,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 145b6d795f..f253ceb21a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -525,7 +525,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 57572fcb24..9b1852392b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -102,7 +102,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", - "FTRACK_SERVER" + "FTRACK_SERVER", + "OPENPYPE_VERSION" ] environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 93fb511a34..a328c3633d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -261,7 +261,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "PYBLISHPLUGINPATH", "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE" + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 43ea64e565..5c7998465b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -141,7 +141,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_USERNAME", "OPENPYPE_RENDER_JOB", "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_MONGO" + "OPENPYPE_MONGO", + "OPENPYPE_VERSION" ] # custom deadline attributes From 0e126a2d829e814d39747b4073cac2fb2cbc7b45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:01:25 +0200 Subject: [PATCH 647/785] :recycle: handle multiple versions --- igniter/tools.py | 5 +++ openpype/cli.py | 23 
++++++++++++++ start.py | 83 ++++++++++++++++++++++++++++++++---------------- 3 files changed, 84 insertions(+), 27 deletions(-) diff --git a/igniter/tools.py b/igniter/tools.py index 57159b5e52..a9d592acf0 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception): pass +class OpenPypeVersionIncompatible(Exception): + """OpenPype version is not compatible with the installed one (build).""" + pass + + def should_add_certificate_path_to_mongo_url(mongo_url): """Check if should add ca certificate to mongo url. diff --git a/openpype/cli.py b/openpype/cli.py index 9a2dfaa141..ffe288040e 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -443,3 +443,26 @@ def interactive(): __version__, sys.version, sys.platform ) code.interact(banner) + + +@main.command() +@click.option("--build", help="Print only build version", + is_flag=True, default=False) +def version(build): + """Print OpenPype version.""" + + from openpype.version import __version__ + from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion + from pathlib import Path + import os + + if getattr(sys, 'frozen', False): + local_version = BootstrapRepos.get_version( + Path(os.getenv("OPENPYPE_ROOT"))) + else: + local_version = OpenPypeVersion.get_installed_version_str() + + if build: + print(local_version) + return + print(f"{__version__} (booted: {local_version})") diff --git a/start.py b/start.py index 37cc4c063d..5cdffafb6e 100644 --- a/start.py +++ b/start.py @@ -103,6 +103,9 @@ import site import distutils.spawn from pathlib import Path + +silent_mode = False + # OPENPYPE_ROOT is variable pointing to build (or code) directory # WARNING `OPENPYPE_ROOT` must be defined before igniter import # - igniter changes cwd which cause that filepath of this script won't lead @@ -138,40 +141,44 @@ if sys.__stdout__: term = blessed.Terminal() def _print(message: str): + if silent_mode: + return if message.startswith("!!! "): - print("{}{}".format(term.orangered2("!!! "), message[4:])) + print(f'{term.orangered2("!!! ")}{message[4:]}') return if message.startswith(">>> "): - print("{}{}".format(term.aquamarine3(">>> "), message[4:])) + print(f'{term.aquamarine3(">>> ")}{message[4:]}') return if message.startswith("--- "): - print("{}{}".format(term.darkolivegreen3("--- "), message[4:])) + print(f'{term.darkolivegreen3("--- ")}{message[4:]}') return if message.startswith("*** "): - print("{}{}".format(term.gold("*** "), message[4:])) + print(f'{term.gold("*** ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.wheat(" - "), message[4:])) + print(f'{term.wheat(" - ")}{message[4:]}') return if message.startswith(" . "): - print("{}{}".format(term.tan(" . "), message[4:])) + print(f'{term.tan(" . ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.seagreen3(" - "), message[7:])) + print(f'{term.seagreen3(" - ")}{message[7:]}') return if message.startswith(" ! "): - print("{}{}".format(term.goldenrod(" ! "), message[7:])) + print(f'{term.goldenrod(" ! 
")}{message[7:]}') return if message.startswith(" * "): - print("{}{}".format(term.aquamarine1(" * "), message[7:])) + print(f'{term.aquamarine1(" * ")}{message[7:]}') return if message.startswith(" "): - print("{}{}".format(term.darkseagreen3(" "), message[4:])) + print(f'{term.darkseagreen3(" ")}{message[4:]}') return print(message) else: def _print(message: str): + if silent_mode: + return print(message) @@ -242,13 +249,14 @@ from igniter.tools import ( get_openpype_global_settings, get_openpype_path_from_settings, validate_mongo_connection, - OpenPypeVersionNotFound + OpenPypeVersionNotFound, + OpenPypeVersionIncompatible ) # noqa from igniter.bootstrap_repos import OpenPypeVersion # noqa: E402 bootstrap = BootstrapRepos() silent_commands = {"run", "igniter", "standalonepublisher", - "extractenvironments"} + "extractenvironments", "version"} def list_versions(openpype_versions: list, local_version=None) -> None: @@ -686,40 +694,47 @@ def _find_frozen_openpype(use_version: str = None, # Specific version is defined if use_version.lower() == "latest": # Version says to use latest version - _print("Finding latest version defined by use version") + _print(">>> Finding latest version defined by use version") openpype_version = bootstrap.find_latest_openpype_version( - use_staging + use_staging, compatible_with=installed_version ) else: - _print("Finding specified version \"{}\"".format(use_version)) + _print(f">>> Finding specified version \"{use_version}\"") openpype_version = bootstrap.find_openpype_version( use_version, use_staging ) if openpype_version is None: raise OpenPypeVersionNotFound( - "Requested version \"{}\" was not found.".format( - use_version - ) + f"Requested version \"{use_version}\" was not found." ) + if not openpype_version.is_compatible(installed_version): + raise OpenPypeVersionIncompatible(( + f"Requested version \"{use_version}\" is not compatible " + f"with installed version \"{installed_version}\"" + )) + elif studio_version is not None: # Studio has defined a version to use - _print("Finding studio version \"{}\"".format(studio_version)) + _print(f">>> Finding studio version \"{studio_version}\"") openpype_version = bootstrap.find_openpype_version( - studio_version, use_staging + studio_version, use_staging, compatible_with=installed_version ) if openpype_version is None: raise OpenPypeVersionNotFound(( - "Requested OpenPype version \"{}\" defined by settings" + "Requested OpenPype version " + f"\"{studio_version}\" defined by settings" " was not found." 
- ).format(studio_version)) + )) else: # Default behavior to use latest version - _print("Finding latest version") + _print(( + ">>> Finding latest version compatible " + f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( - use_staging + use_staging, compatible_with=installed_version ) if openpype_version is None: if use_staging: @@ -800,7 +815,7 @@ def _bootstrap_from_code(use_version, use_staging): if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(_openpype_root)) - switch_str = f" - will switch to {use_version}" if use_version else "" + switch_str = f" - will switch to {use_version}" if use_version and use_version != local_version else "" # noqa _print(f" - booting version: {local_version}{switch_str}") assert local_version else: @@ -913,13 +928,24 @@ def _boot_print_versions(use_staging, local_version, openpype_root): _print("--- This will list only staging versions detected.") _print(" To see other version, omit --use-staging argument.") - openpype_versions = bootstrap.find_openpype(include_zips=True, - staging=use_staging) if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(openpype_root)) else: local_version = OpenPypeVersion.get_installed_version_str() + compatible_with = OpenPypeVersion(version=local_version) + if "--all" in sys.argv: + compatible_with = None + _print("--- Showing all version (even those not compatible).") + else: + _print(("--- Showing only compatible versions " + f"with [ {compatible_with.major}.{compatible_with.minor} ]")) + + openpype_versions = bootstrap.find_openpype( + include_zips=True, + staging=use_staging, + compatible_with=compatible_with) + list_versions(openpype_versions, local_version) @@ -936,6 +962,9 @@ def _boot_handle_missing_version(local_version, use_staging, message): def boot(): """Bootstrap OpenPype.""" + global silent_mode + if any(arg in silent_commands for arg in sys.argv): + silent_mode = True # ------------------------------------------------------------------------ # Set environment to OpenPype root path From 9205d4bde12baf8901a2ba675157cc0b4ad65919 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:02:24 +0200 Subject: [PATCH 648/785] :recycle: changes in bootstrapping for multiple versions --- igniter/bootstrap_repos.py | 196 ++++++++++++++++++++++++++++++------- 1 file changed, 158 insertions(+), 38 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 8888440f90..47f2525952 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -380,7 +380,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_local_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None, compatible_with: OpenPypeVersion = None ) -> List: """Get all versions available on this machine. @@ -390,6 +391,8 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + compatible_with (OpenPypeVersion): Return only those compatible + with specified version. 
""" # Return all local versions if arguments are set to None if production is None and staging is None: @@ -406,10 +409,19 @@ class OpenPypeVersion(semver.VersionInfo): if not production and not staging: return [] + # DEPRECATED: backwards compatible way to look for versions in root dir_to_search = Path(user_data_dir("openpype", "pypeclub")) versions = OpenPypeVersion.get_versions_from_directory( - dir_to_search + dir_to_search, compatible_with=compatible_with ) + if compatible_with: + dir_to_search = Path( + user_data_dir("openpype", "pypeclub")) / f"{compatible_with.major}.{compatible_with.minor}" # noqa + versions += OpenPypeVersion.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with + ) + + filtered_versions = [] for version in versions: if version.is_staging(): @@ -421,7 +433,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_remote_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None, compatible_with: OpenPypeVersion = None ) -> List: """Get all versions available in OpenPype Path. @@ -431,6 +444,8 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + compatible_with (OpenPypeVersion): Return only those compatible + with specified version. """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -464,7 +479,14 @@ class OpenPypeVersion(semver.VersionInfo): if not dir_to_search: return [] - versions = cls.get_versions_from_directory(dir_to_search) + # DEPRECATED: look for version in root directory + versions = cls.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with) + if compatible_with: + dir_to_search = dir_to_search / f"{compatible_with.major}.{compatible_with.minor}" # noqa + versions += cls.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with) + filtered_versions = [] for version in versions: if version.is_staging(): @@ -475,11 +497,15 @@ class OpenPypeVersion(semver.VersionInfo): return list(sorted(set(filtered_versions))) @staticmethod - def get_versions_from_directory(openpype_dir: Path) -> List: + def get_versions_from_directory( + openpype_dir: Path, + compatible_with: OpenPypeVersion = None) -> List: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. + compatible_with (OpenPypeVersion): Return only versions compatible + with build version specified as OpenPypeVersion. Returns: list of OpenPypeVersion @@ -514,6 +540,10 @@ class OpenPypeVersion(semver.VersionInfo): )[0]: continue + if compatible_with and not detected_version.is_compatible( + compatible_with): + continue + detected_version.path = item _openpype_versions.append(detected_version) @@ -545,8 +575,9 @@ class OpenPypeVersion(semver.VersionInfo): def get_latest_version( staging: bool = False, local: bool = None, - remote: bool = None - ) -> OpenPypeVersion: + remote: bool = None, + compatible_with: OpenPypeVersion = None + ) -> Union[OpenPypeVersion, None]: """Get latest available version. The version does not contain information about path and source. @@ -564,6 +595,9 @@ class OpenPypeVersion(semver.VersionInfo): staging (bool, optional): List staging versions if True. local (bool, optional): List local versions if True. remote (bool, optional): List remote versions if True. + compatible_with (OpenPypeVersion, optional) Return only version + compatible with compatible_with. 
+ """ if local is None and remote is None: local = True @@ -594,7 +628,12 @@ class OpenPypeVersion(semver.VersionInfo): return None all_versions.sort() - return all_versions[-1] + latest_version: OpenPypeVersion + latest_version = all_versions[-1] + if compatible_with and not latest_version.is_compatible( + compatible_with): + return None + return latest_version @classmethod def get_expected_studio_version(cls, staging=False, global_settings=None): @@ -617,6 +656,21 @@ class OpenPypeVersion(semver.VersionInfo): return None return OpenPypeVersion(version=result) + def is_compatible(self, version: OpenPypeVersion): + """Test build compatibility. + + This will simply compare major and minor versions (ignoring patch + and the rest). + + Args: + version (OpenPypeVersion): Version to check compatibility with. + + Returns: + bool: if the version is compatible + + """ + return self.major == version.major and self.minor == version.minor + class BootstrapRepos: """Class for bootstrapping local OpenPype installation. @@ -737,8 +791,9 @@ class BootstrapRepos: return # create destination directory - if not self.data_dir.exists(): - self.data_dir.mkdir(parents=True) + destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}" # noqa + if not destination.exists(): + destination.mkdir(parents=True) # create zip inside temporary directory. with tempfile.TemporaryDirectory() as temp_dir: @@ -766,7 +821,9 @@ class BootstrapRepos: Path to moved zip on success. """ - destination = self.data_dir / zip_file.name + version = OpenPypeVersion.version_in_str(zip_file.name) + destination_dir = self.data_dir / f"{version.major}.{version.minor}" + destination = destination_dir / zip_file.name if destination.exists(): self._print( @@ -778,7 +835,7 @@ class BootstrapRepos: self._print(str(e), LOG_ERROR, exc_info=True) return None try: - shutil.move(zip_file.as_posix(), self.data_dir.as_posix()) + shutil.move(zip_file.as_posix(), destination_dir.as_posix()) except shutil.Error as e: self._print(str(e), LOG_ERROR, exc_info=True) return None @@ -991,6 +1048,16 @@ class BootstrapRepos: @staticmethod def _validate_dir(path: Path) -> tuple: + """Validate checksums in a given path. + + Args: + path (Path): path to folder to validate. + + Returns: + tuple(bool, str): returns status and reason as a bool + and str in a tuple. + + """ checksums_file = Path(path / "checksums") if not checksums_file.exists(): # FIXME: This should be set to False sometimes in the future @@ -1072,7 +1139,20 @@ class BootstrapRepos: sys.path.insert(0, directory.as_posix()) @staticmethod - def find_openpype_version(version, staging): + def find_openpype_version( + version: Union[str, OpenPypeVersion], + staging: bool, + compatible_with: OpenPypeVersion = None + ) -> Union[OpenPypeVersion, None]: + """Find location of specified OpenPype version. + + Args: + version (Union[str, OpenPypeVersion): Version to find. + staging (bool): Filter staging versions. + compatible_with (OpenPypeVersion, optional): Find only + versions compatible with specified one. 
+ + """ if isinstance(version, str): version = OpenPypeVersion(version=version) @@ -1081,7 +1161,8 @@ class BootstrapRepos: return installed_version local_versions = OpenPypeVersion.get_local_versions( - staging=staging, production=not staging + staging=staging, production=not staging, + compatible_with=compatible_with ) zip_version = None for local_version in local_versions: @@ -1095,7 +1176,8 @@ class BootstrapRepos: return zip_version remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging, production=not staging + staging=staging, production=not staging, + compatible_with=compatible_with ) for remote_version in remote_versions: if remote_version == version: @@ -1103,13 +1185,14 @@ class BootstrapRepos: return None @staticmethod - def find_latest_openpype_version(staging): + def find_latest_openpype_version( + staging, compatible_with: OpenPypeVersion = None): installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions( - staging=staging + staging=staging, compatible_with=compatible_with ) remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging + staging=staging, compatible_with=compatible_with ) all_versions = local_versions + remote_versions if not staging: @@ -1134,7 +1217,9 @@ class BootstrapRepos: self, openpype_path: Union[Path, str] = None, staging: bool = False, - include_zips: bool = False) -> Union[List[OpenPypeVersion], None]: + include_zips: bool = False, + compatible_with: OpenPypeVersion = None + ) -> Union[List[OpenPypeVersion], None]: """Get ordered dict of detected OpenPype version. Resolution order for OpenPype is following: @@ -1150,6 +1235,8 @@ class BootstrapRepos: otherwise. include_zips (bool, optional): If set True it will try to find OpenPype in zip files in given directory. + compatible_with (OpenPypeVersion, optional): Find only those + versions compatible with the one specified. Returns: dict of Path: Dictionary of detected OpenPype version. @@ -1168,30 +1255,56 @@ class BootstrapRepos: ("Finding OpenPype in non-filesystem locations is" " not implemented yet.")) - dir_to_search = self.data_dir - user_versions = self.get_openpype_versions(self.data_dir, staging) - # if we have openpype_path specified, search only there. + version_dir = "" + if compatible_with: + version_dir = f"{compatible_with.major}.{compatible_with.minor}" + + # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir + # DEPRECATED: lookup in root of this folder is deprecated in favour + # of major.minor sub-folders. + dirs_to_search = [ + self.data_dir + ] + if compatible_with: + dirs_to_search.append(self.data_dir / version_dir) + if openpype_path: - dir_to_search = openpype_path + dirs_to_search = [openpype_path] + + if compatible_with: + dirs_to_search.append(openpype_path / version_dir) else: - if os.getenv("OPENPYPE_PATH"): - if Path(os.getenv("OPENPYPE_PATH")).exists(): - dir_to_search = Path(os.getenv("OPENPYPE_PATH")) + # first try OPENPYPE_PATH and if that is not available, + # try registry. 
+ if os.getenv("OPENPYPE_PATH") \ + and Path(os.getenv("OPENPYPE_PATH")).exists(): + dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))] + + if compatible_with: + dirs_to_search.append( + Path(os.getenv("OPENPYPE_PATH")) / version_dir) else: try: registry_dir = Path( str(self.registry.get_item("openPypePath"))) if registry_dir.exists(): - dir_to_search = registry_dir + dirs_to_search = [registry_dir] + if compatible_with: + dirs_to_search.append(registry_dir / version_dir) except ValueError: # nothing found in registry, we'll use data dir pass - openpype_versions = self.get_openpype_versions(dir_to_search, staging) - openpype_versions += user_versions + openpype_versions = [] + for dir_to_search in dirs_to_search: + try: + openpype_versions += self.get_openpype_versions( + dir_to_search, staging, compatible_with=compatible_with) + except ValueError: + # location is invalid, skip it + pass - # remove zip file version if needed. if not include_zips: openpype_versions = [ v for v in openpype_versions if v.path.suffix != ".zip" @@ -1304,9 +1417,8 @@ class BootstrapRepos: raise ValueError( f"version {version} is not associated with any file") - destination = self.data_dir / version.path.stem - if destination.exists(): - assert destination.is_dir() + destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem # noqa + if destination.exists() and destination.is_dir(): try: shutil.rmtree(destination) except OSError as e: @@ -1375,7 +1487,7 @@ class BootstrapRepos: else: dir_name = openpype_version.path.stem - destination = self.data_dir / dir_name + destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name # noqa # test if destination directory already exist, if so lets delete it. if destination.exists() and force: @@ -1553,14 +1665,18 @@ class BootstrapRepos: return False return True - def get_openpype_versions(self, - openpype_dir: Path, - staging: bool = False) -> list: + def get_openpype_versions( + self, + openpype_dir: Path, + staging: bool = False, + compatible_with: OpenPypeVersion = None) -> list: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. staging (bool, optional): Find staging versions if True. + compatible_with (OpenPypeVersion, optional): Get only versions + compatible with the one specified. 
Returns: list of OpenPypeVersion @@ -1570,7 +1686,7 @@ class BootstrapRepos: """ if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") + raise ValueError(f"specified directory {openpype_dir} is invalid") _openpype_versions = [] # iterate over directory in first level and find all that might @@ -1595,6 +1711,10 @@ class BootstrapRepos: ): continue + if compatible_with and \ + not detected_version.is_compatible(compatible_with): + continue + detected_version.path = item if staging and detected_version.is_staging(): _openpype_versions.append(detected_version) From de70521f562084bf5a0cef20179ad2b73efa3bb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:02:53 +0200 Subject: [PATCH 649/785] :recycle: deadline plugin support for job specific OP versions --- .../custom/plugins/GlobalJobPreLoad.py | 87 +++++++++++++++++-- .../custom/plugins/OpenPype/OpenPype.param | 11 ++- .../custom/plugins/OpenPype/OpenPype.py | 86 +++++++++++++++++- 3 files changed, 171 insertions(+), 13 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index bcd853f374..a43c6c7733 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -6,13 +6,29 @@ import subprocess import json import platform import uuid -from Deadline.Scripting import RepositoryUtils, FileUtils +import re +from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils + + +def get_openpype_version_from_path(path): + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] def get_openpype_executable(): """Return OpenPype Executable from Event Plug-in Settings""" config = RepositoryUtils.GetPluginConfig("OpenPype") - return config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + dir_list = config.GetConfigEntryWithDefault( + "OpenPypeInstallationDirs", "") + return exe_list, dir_list def inject_openpype_environment(deadlinePlugin): @@ -25,16 +41,71 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: print(">>> Getting OpenPype executable ...") - exe_list = get_openpype_executable() - openpype_app = FileUtils.SearchFileList(exe_list) - if openpype_app == "": + exe_list, dir_list = get_openpype_executable() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. 
+ requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") + if requested_version: + print(("Scanning for compatible requested " + f"version {requested_version}")) + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if dir: + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + openpype_versions.append((version, subdir)) + + exe = FileUtils.SearchFileList(exe_list) + if openpype_versions: + # if looking for requested compatible version, + # add the implicitly specified to the list too. + version = get_openpype_version_from_path( + os.path.dirname(exe)) + if version: + openpype_versions.append((version, os.path.dirname(exe))) + + if requested_version: + # sort detected versions + if openpype_versions: + openpype_versions.sort(key=lambda ver: ver[0]) + requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 + compatible_versions = [] + for version in openpype_versions: + v = version[0].split(".")[:3] + if v[0] == requested_major and v[1] == requested_minor: + compatible_versions.append(version) + if not compatible_versions: + raise RuntimeError( + ("Cannot find compatible version available " + "for version {} requested by the job. " + "Please add it through plugin configuration " + "in Deadline or install it to configured " + "directory.").format(requested_version)) + # sort compatible versions nad pick the last one + compatible_versions.sort(key=lambda ver: ver[0]) + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join( + compatible_versions[-1][1], "openpype_console.exe"), + os.path.join( + compatible_versions[-1][1], "openpype_console") + ] + exe = FileUtils.SearchFileList(";".join(exe_list)) + if exe == "": raise RuntimeError( "OpenPype executable was not found " + "in the semicolon separated list \"" + exe_list + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") - print("--- OpenPype executable: {}".format(openpype_app)) + print("--- OpenPype executable: {}".format(exe)) # tempfile.TemporaryFile cannot be used because of locking temp_file_name = "{}_{}.json".format( @@ -45,7 +116,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Temporary path: {}".format(export_url)) args = [ - openpype_app, + exe, "--headless", 'extractenvironments', export_url @@ -77,7 +148,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Executing: {}".format(args)) std_output = subprocess.check_output(args, - cwd=os.path.dirname(openpype_app), + cwd=os.path.dirname(exe), env=env) print(">>> Process result {}".format(std_output)) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param index 8bd6dce12d..b3ac18e20c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param @@ -7,11 +7,20 @@ Index=0 Default=OpenPype Plugin for Deadline Description=Not configurable +[OpenPypeInstallationDirs] +Type=multilinemultifolder +Label=Directories where OpenPype versions are installed +Category=OpenPype Installation Directories +CategoryOrder=0 +Index=0 +Default=C:\Program Files (x86)\OpenPype +Description=Path or paths to directories where multiple versions of OpenPype might be installed. Enter every such path on separate lines. + [OpenPypeExecutable] Type=multilinemultifilename Label=OpenPype Executable Category=OpenPype Executables -CategoryOrder=0 +CategoryOrder=1 Index=0 Default= Description=The path to the OpenPype executable. Enter alternative paths on separate lines. diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 451d71fb63..b84560f175 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -1,10 +1,18 @@ +#!/usr/bin/env python3 + from System.IO import Path from System.Text.RegularExpressions import Regex from Deadline.Plugins import PluginType, DeadlinePlugin -from Deadline.Scripting import StringUtils, FileUtils, RepositoryUtils +from Deadline.Scripting import ( + StringUtils, + FileUtils, + DirectoryUtils, + RepositoryUtils +) import re +import os ###################################################################### @@ -52,13 +60,83 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): self.AddStdoutHandlerCallback( ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress + @staticmethod + def get_openpype_version_from_path(path): + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] + def RenderExecutable(self): - exeList = self.GetConfigEntry("OpenPypeExecutable") - exe = FileUtils.SearchFileList(exeList) + job = self.GetJob() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. 
+ requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") + if requested_version: + self.LogInfo(( + "Scanning for compatible requested " + f"version {requested_version}")) + dir_list = self.GetConfigEntry("OpenPypeInstallationDirs") + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if dir: + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = self.get_openpype_version_from_path(subdir) + if not version: + continue + openpype_versions.append((version, subdir)) + + exe_list = self.GetConfigEntry("OpenPypeExecutable") + exe = FileUtils.SearchFileList(exe_list) + if openpype_versions: + # if looking for requested compatible version, + # add the implicitly specified to the list too. + version = self.get_openpype_version_from_path( + os.path.dirname(exe)) + if version: + openpype_versions.append((version, os.path.dirname(exe))) + + if requested_version: + # sort detected versions + if openpype_versions: + openpype_versions.sort(key=lambda ver: ver[0]) + requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 + compatible_versions = [] + for version in openpype_versions: + v = version[0].split(".")[:3] + if v[0] == requested_major and v[1] == requested_minor: + compatible_versions.append(version) + if not compatible_versions: + self.FailRender(("Cannot find compatible version available " + "for version {} requested by the job. " + "Please add it through plugin configuration " + "in Deadline or install it to configured " + "directory.").format(requested_version)) + # sort compatible versions nad pick the last one + compatible_versions.sort(key=lambda ver: ver[0]) + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join( + compatible_versions[-1][1], "openpype_console.exe"), + os.path.join( + compatible_versions[-1][1], "openpype_console") + ] + exe = FileUtils.SearchFileList(";".join(exe_list)) + if exe == "": self.FailRender( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exeList + "\". " + + "in the semicolon separated list \"" + exe_list + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") return exe From 8a55a83d7dc835da2d5f6416aa66686aedb922d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:38:54 +0200 Subject: [PATCH 650/785] added settings to be able fill empty intent and define it's label --- .../settings/defaults/system_settings/modules.json | 5 +++-- .../module_settings/schema_ftrack.json | 14 +++++++++++--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 8cd4114cb0..a3cf98f3ed 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -26,13 +26,14 @@ "linux": [] }, "intent": { + "allow_empty_intent": true, + "empty_intent_label": "", "items": { - "-": "-", "wip": "WIP", "final": "Final", "test": "Test" }, - "default": "-" + "default": "" }, "custom_attributes": { "show": { diff --git a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json index 654ddf2938..7c5774415c 100644 --- a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json +++ b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json @@ -50,8 +50,15 @@ "is_group": true, "children": [ { - "type": "label", - "label": "Intent" + "type": "boolean", + "key": "allow_empty_intent", + "label": "Allow empty intent" + }, + { + "type": "text", + "key": "empty_intent_label", + "label": "Empty item label", + "placeholder": "< Not set >" }, { "type": "dict-modifiable", @@ -64,7 +71,8 @@ { "key": "default", "type": "text", - "label": "Default Intent" + "label": "Default Intent", + "placeholder": "< First available >" }, { "type": "separator" From a591ea92efd534baf14d5f9fc549ba65dabc9894 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:39:45 +0200 Subject: [PATCH 651/785] changed model in pype publisher to use new settings --- openpype/tools/pyblish_pype/model.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/openpype/tools/pyblish_pype/model.py b/openpype/tools/pyblish_pype/model.py index 2931a379b3..31aa63677e 100644 --- a/openpype/tools/pyblish_pype/model.py +++ b/openpype/tools/pyblish_pype/model.py @@ -86,7 +86,7 @@ class IntentModel(QtGui.QStandardItemModel): First and default value is {"< Not Set >": None} """ - default_item = {"< Not Set >": None} + default_empty_label = "< Not set >" def __init__(self, parent=None): super(IntentModel, self).__init__(parent) @@ -102,27 +102,39 @@ class IntentModel(QtGui.QStandardItemModel): self._item_count = 0 self.default_index = 0 - intents_preset = ( + intent_settings = ( get_system_settings() .get("modules", {}) .get("ftrack", {}) .get("intent", {}) ) - default = intents_preset.get("default") - items = intents_preset.get("items", {}) + items = intent_settings.get("items", {}) if not items: return - for idx, item_value in enumerate(items.keys()): + allow_empty_intent = intent_settings.get("allow_empty_intent", True) + empty_intent_label = ( + intent_settings.get("empty_intent_label") + or self.default_empty_label + ) + listed_items = list(items.items()) + if allow_empty_intent: + listed_items.insert(0, ("", empty_intent_label)) + + default = intent_settings.get("default") + + for idx, item in 
enumerate(listed_items): + item_value = item[0] if item_value == default: self.default_index = idx break - self.add_items(items) + self._add_items(listed_items) - def add_items(self, items): - for value, label in items.items(): + def _add_items(self, items): + for item in items: + value, label = item new_item = QtGui.QStandardItem() new_item.setData(label, QtCore.Qt.DisplayRole) new_item.setData(value, Roles.IntentItemValue) From 23601cb2448437be40ac215ef1584080de2a5205 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:40:28 +0200 Subject: [PATCH 652/785] unset intent from context if empty item is used --- openpype/tools/pyblish_pype/window.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/tools/pyblish_pype/window.py b/openpype/tools/pyblish_pype/window.py index 78590259bc..e167405325 100644 --- a/openpype/tools/pyblish_pype/window.py +++ b/openpype/tools/pyblish_pype/window.py @@ -523,6 +523,7 @@ class Window(QtWidgets.QDialog): instance_item.setData(enable_value, Roles.IsEnabledRole) def _add_intent_to_context(self): + context_value = None if ( self.intent_model.has_items and "intent" not in self.controller.context.data @@ -530,11 +531,17 @@ class Window(QtWidgets.QDialog): idx = self.intent_model.index(self.intent_box.currentIndex(), 0) intent_value = self.intent_model.data(idx, Roles.IntentItemValue) intent_label = self.intent_model.data(idx, QtCore.Qt.DisplayRole) + if intent_value: + context_value = { + "value": intent_value, + "label": intent_label + } - self.controller.context.data["intent"] = { - "value": intent_value, - "label": intent_label - } + # Unset intent if is set to empty value + if context_value is None: + self.controller.context.data.pop("intent", None) + else: + self.controller.context.data["intent"] = context_value def on_instance_toggle(self, index, state=None): """An item is requesting to be toggled""" From 845d04686f0d586671e12b8bfdeda5b605dc438d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 15:05:13 +0800 Subject: [PATCH 653/785] bugfix for validating look data contents with custom attribute on group --- .../hosts/maya/plugins/publish/validate_look_contents.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index 443a0ad719..8aa88a75d3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -76,12 +76,12 @@ class ValidateLookContents(pyblish.api.InstancePlugin): "`relationships`" % instance.name) invalid.add(instance.name) - # Check if attributes are on a node with an ID, crucial for rebuild! + # Check if attributes are on a node with a name and an ID, crucial for rebuild! 
for attr_changes in lookdata["attributes"]: - if not attr_changes["uuid"]: + if not attr_changes["uuid"] and not attr_changes["name"]: cls.log.error("Node '%s' has no cbId, please set the " - "attributes to its children if it has any" - % attr_changes["name"]) + "attributes to its children if it has any" + % attr_changes["name"]) invalid.add(instance.name) return list(invalid) From 674b3900ac56607392e28be1e7f444a62e24b2ac Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 15:07:38 +0800 Subject: [PATCH 654/785] bugfix for validating look data contents with custom attribute on group --- openpype/hosts/maya/plugins/publish/validate_look_contents.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index 8aa88a75d3..9eb965970a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -76,9 +76,9 @@ class ValidateLookContents(pyblish.api.InstancePlugin): "`relationships`" % instance.name) invalid.add(instance.name) - # Check if attributes are on a node with a name and an ID, crucial for rebuild! + # Check if attributes are on a node with an ID, crucial for rebuild! for attr_changes in lookdata["attributes"]: - if not attr_changes["uuid"] and not attr_changes["name"]: + if not attr_changes["uuid"] and not attr_changes["attributes"]: cls.log.error("Node '%s' has no cbId, please set the " "attributes to its children if it has any" % attr_changes["name"]) From 8120e9d66bbd911a4e4722e6a1fb5c06a572af71 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 15:31:33 +0800 Subject: [PATCH 655/785] bugfix for validating look data contents with custom attribute on group --- .../hosts/maya/plugins/publish/validate_look_contents.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index 9eb965970a..01d7a9ef2f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -76,16 +76,15 @@ class ValidateLookContents(pyblish.api.InstancePlugin): "`relationships`" % instance.name) invalid.add(instance.name) - # Check if attributes are on a node with an ID, crucial for rebuild! + # Check if attributes are on a node with an attirbute and an ID, crucial for rebuild! 
for attr_changes in lookdata["attributes"]: if not attr_changes["uuid"] and not attr_changes["attributes"]: cls.log.error("Node '%s' has no cbId, please set the " - "attributes to its children if it has any" - % attr_changes["name"]) + "attributes to its children if it has any" + % attr_changes["name"]) invalid.add(instance.name) return list(invalid) - @classmethod def validate_looks(cls, instance): From 39975a7335f1c27c3764518a37ac0c304b347363 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 15:32:35 +0800 Subject: [PATCH 656/785] bugfix for validating look data contents with custom attribute on group --- openpype/hosts/maya/plugins/publish/validate_look_contents.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index 01d7a9ef2f..b1e1d5416b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -76,7 +76,7 @@ class ValidateLookContents(pyblish.api.InstancePlugin): "`relationships`" % instance.name) invalid.add(instance.name) - # Check if attributes are on a node with an attirbute and an ID, crucial for rebuild! + # Check if attributes are on a node with an ID, crucial for rebuild! for attr_changes in lookdata["attributes"]: if not attr_changes["uuid"] and not attr_changes["attributes"]: cls.log.error("Node '%s' has no cbId, please set the " From 3d7e1953075809af9323951046fc3d321da8352b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 2 Aug 2022 11:26:33 +0200 Subject: [PATCH 657/785] :recycle: skip non-existent local path when finding local version, stop crashing if directory to search doesn't exist - this will allow to just use build version --- igniter/bootstrap_repos.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 47f2525952..750b2f1bf7 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -514,10 +514,10 @@ class OpenPypeVersion(semver.VersionInfo): ValueError: if invalid path is specified. """ - if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") - _openpype_versions = [] + if not openpype_dir.exists() and not openpype_dir.is_dir(): + return _openpype_versions + # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): From 89bd23856c30e39f2493d99b2c743d3b918cccda Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 12:25:51 +0200 Subject: [PATCH 658/785] OP-3405 - refactor - updated methods signature Renamed collection to project_name as when we are leaving MongoDB, collection doesnt make much sense. 
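To make the scope of this rename concrete, below is a minimal usage sketch of the renamed public API as a caller would invoke it after this change. It assumes `sync_server` is an already initialized SyncServerModule instance; the project name, representation id and site name are placeholders rather than values taken from the patch.

    def add_then_remove_site(sync_server, project_name, representation_id):
        # Pair the representation with a configured remote site so the sync
        # loop picks it up; `project_name` is what the old API called
        # `collection`.
        sync_server.add_site(project_name, representation_id,
                             site_name="gdrive", force=True)

        # Later, unpair the site again; remove_local_files also cleans up
        # files that were synced locally for that site.
        sync_server.remove_site(project_name, representation_id,
                                site_name="gdrive",
                                remove_local_files=True)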
--- .../providers/abstract_provider.py | 8 +- .../modules/sync_server/providers/dropbox.py | 12 +- .../modules/sync_server/providers/gdrive.py | 16 +- .../sync_server/providers/local_drive.py | 12 +- .../modules/sync_server/providers/sftp.py | 16 +- openpype/modules/sync_server/sync_server.py | 71 +++---- .../modules/sync_server/sync_server_module.py | 189 +++++++++--------- openpype/modules/sync_server/tray/models.py | 2 +- 8 files changed, 164 insertions(+), 162 deletions(-) diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 688a17f14f..8c2fe1cad9 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -62,7 +62,7 @@ class AbstractProvider: @abc.abstractmethod def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. @@ -75,7 +75,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): name of project_name file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -87,7 +87,7 @@ class AbstractProvider: @abc.abstractmethod def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -99,7 +99,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index dfc42fed75..89d6990841 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -224,7 +224,7 @@ class DropboxHandler(AbstractProvider): return False def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. 
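The docstrings above spell out the contract shared by every provider handler: while a transfer runs, the handler keeps calling `server.update_db` with a `progress` value between 0.0 and 1.0 for the processed file and site. A condensed, illustrative sketch of that contract follows; the `get_fraction` callable is a stand-in for whatever each handler actually measures (chunk offsets, file sizes) and is not part of the real code.

    import time

    def report_transfer_progress(server, project_name, file, representation,
                                 site, get_fraction, tick=1.0):
        # Illustrative only: write progress to the representation document
        # at most once per `tick` seconds until the transfer is complete.
        last_tick = None
        while True:
            fraction = get_fraction()
            done = fraction >= 1.0
            if done or last_tick is None or time.time() - last_tick >= tick:
                last_tick = time.time()
                server.update_db(project_name=project_name,
                                 new_file_id=None,
                                 file=file,
                                 representation=representation,
                                 site=site,
                                 progress=fraction)
            if done:
                break
            time.sleep(0.2)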
@@ -237,7 +237,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -290,7 +290,7 @@ class DropboxHandler(AbstractProvider): cursor.offset = f.tell() server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -301,7 +301,7 @@ class DropboxHandler(AbstractProvider): return path def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -313,7 +313,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -337,7 +337,7 @@ class DropboxHandler(AbstractProvider): self.dbx.files_download_to_file(local_path, source_path) server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index aa7329b104..bef707788b 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -251,7 +251,7 @@ class GDriveHandler(AbstractProvider): return folder_id def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -264,7 +264,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -324,7 +324,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -333,7 +333,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Uploaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -358,7 +358,7 @@ class GDriveHandler(AbstractProvider): return response['id'] def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'local_path'. 
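Both the upload loop above and the download loop that follows use the same pause-aware control flow: each chunk iteration first checks whether the representation (or one of its parents) was paused in the meantime, then throttles progress writes to roughly one per second. A simplified sketch of that loop, assuming `next_chunk` mimics the googleapiclient request objects by returning `(status, response)` with `response` staying None until the transfer finishes:

    import time

    def run_chunked_transfer(server, project_name, representation, file, site,
                             next_chunk):
        # Illustrative only: drive a resumable transfer chunk by chunk while
        # honouring pause requests and reporting progress once per second.
        last_tick = None
        response = None
        while response is None:
            if server.is_representation_paused(representation["_id"],
                                               check_parents=True,
                                               project_name=project_name):
                raise ValueError("Paused during process, please redo.")
            status, response = next_chunk()
            if status and (last_tick is None
                           or time.time() - last_tick >= 1.0):
                last_tick = time.time()
                server.update_db(project_name=project_name,
                                 new_file_id=None,
                                 file=file,
                                 representation=representation,
                                 site=site,
                                 progress=float(status.progress()))
        return response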
@@ -372,7 +372,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -410,7 +410,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -419,7 +419,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Downloaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 172cb338cf..4951ef4d1a 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider): return editable def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False, direction="Upload"): """ Copies file from 'source_path' to 'target_path' @@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider): thread = threading.Thread(target=self._copy, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, direction) else: if os.path.exists(target_path): @@ -105,13 +105,13 @@ class LocalDriveHandler(AbstractProvider): return os.path.basename(target_path) def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite, direction="Download") def delete_file(self, path): @@ -188,7 +188,7 @@ class LocalDriveHandler(AbstractProvider): except shutil.SameFileError: print("same files, skipping") - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -204,7 +204,7 @@ class LocalDriveHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 49b87b14ec..302ffae3e6 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -222,7 +222,7 @@ class SFTPHandler(AbstractProvider): return os.path.basename(path) def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -235,7 +235,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -256,7 +256,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._upload, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "upload") return os.path.basename(target_path) @@ -267,7 +267,7 @@ class SFTPHandler(AbstractProvider): conn.put(source_path, target_path) def download_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'target_path'. @@ -281,7 +281,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -302,7 +302,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._download, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "download") return os.path.basename(target_path) @@ -425,7 +425,7 @@ class SFTPHandler(AbstractProvider): pysftp.exceptions.ConnectionException): log.warning("Couldn't connect", exc_info=True) - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -446,7 +446,7 @@ class SFTPHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 356a75f99d..9cc55ec562 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -14,7 +14,7 @@ from .utils import SyncStatus, ResumableError log = PypeLogger().get_logger("SyncServer") -async def upload(module, collection, file, representation, provider_name, +async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Upload single 'file' of a 'representation' to 'provider'. @@ -31,7 +31,7 @@ async def upload(module, collection, file, representation, provider_name, Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source db file (dictionary): of file from representation in Mongo representation (dictionary): of representation provider_name (string): gdrive, gdc etc. @@ -47,7 +47,7 @@ async def upload(module, collection, file, representation, provider_name, # thread can do that at a time, upload/download to prepared # structure should be run in parallel remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) @@ -55,7 +55,7 @@ async def upload(module, collection, file, representation, provider_name, file_path = file.get("path", "") try: local_file_path, remote_file_path = resolve_paths(module, - file_path, collection, remote_site_name, remote_handler + file_path, project_name, remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -74,27 +74,28 @@ async def upload(module, collection, file, representation, provider_name, local_file_path, remote_file_path, module, - collection, + project_name, file, representation, remote_site_name, True ) - module.handle_alternate_site(collection, representation, remote_site_name, + module.handle_alternate_site(project_name, representation, + remote_site_name, file["_id"], file_id) return file_id -async def download(module, collection, file, representation, provider_name, +async def download(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Downloads file to local folder denoted in representation.Context. 
Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source file (dictionary) : info about processed file representation (dictionary): repr that 'file' belongs to provider_name (string): 'gdrive' etc @@ -108,20 +109,20 @@ async def download(module, collection, file, representation, provider_name, """ with module.lock: remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") local_file_path, remote_file_path = resolve_paths( - module, file_path, collection, remote_site_name, remote_handler + module, file_path, project_name, remote_site_name, remote_handler ) local_folder = os.path.dirname(local_file_path) os.makedirs(local_folder, exist_ok=True) - local_site = module.get_active_site(collection) + local_site = module.get_active_site(project_name) loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, @@ -129,20 +130,20 @@ async def download(module, collection, file, representation, provider_name, remote_file_path, local_file_path, module, - collection, + project_name, file, representation, local_site, True ) - module.handle_alternate_site(collection, representation, local_site, + module.handle_alternate_site(project_name, representation, local_site, file["_id"], file_id) return file_id -def resolve_paths(module, file_path, collection, +def resolve_paths(module, file_path, project_name, remote_site_name=None, remote_handler=None): """ Returns tuple of local and remote file paths with {root} @@ -153,7 +154,7 @@ def resolve_paths(module, file_path, collection, Args: module(SyncServerModule): object to run SyncServerModule API file_path(string): path with {root} - collection(string): project name + project_name(string): project name remote_site_name(string): remote site remote_handler(AbstractProvider): implementation Returns: @@ -164,7 +165,7 @@ def resolve_paths(module, file_path, collection, remote_file_path = remote_handler.resolve_path(file_path) local_handler = lib.factory.get_provider( - 'local_drive', collection, module.get_active_site(collection)) + 'local_drive', project_name, module.get_active_site(project_name)) local_file_path = local_handler.resolve_path(file_path) return local_file_path, remote_file_path @@ -269,7 +270,7 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has configuration, credentials) - - for each collection it looks for representations that should + - for each project_name it looks for representations that should be synced - synchronize found collections - update representations - fills error messages for exceptions @@ -282,17 +283,17 @@ class SyncServerThread(threading.Thread): import time start_time = time.time() self.module.set_sync_project_settings() # clean cache - collection = None + project_name = None enabled_projects = self.module.get_enabled_projects() - for collection in enabled_projects: - preset = self.module.sync_project_settings[collection] + for project_name in enabled_projects: + preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(collection) + local_site, remote_site = self._working_sites(project_name) if not all([local_site, remote_site]): continue sync_repres = self.module.get_sync_representations( - collection, + project_name, local_site, remote_site ) @@ -310,7 +311,7 @@ class SyncServerThread(threading.Thread): 
remote_provider = \ self.module.get_provider_for_site(site=remote_site) handler = lib.factory.get_provider(remote_provider, - collection, + project_name, remote_site, presets=site_preset) limit = lib.factory.get_provider_batch_limit( @@ -341,7 +342,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( upload(self.module, - collection, + project_name, file, sync, remote_provider, @@ -353,7 +354,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, remote_site, - collection + project_name )) processed_file_path.add(file_path) if status == SyncStatus.DO_DOWNLOAD: @@ -361,7 +362,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( download(self.module, - collection, + project_name, file, sync, remote_provider, @@ -373,7 +374,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, local_site, - collection + project_name )) processed_file_path.add(file_path) @@ -384,12 +385,12 @@ class SyncServerThread(threading.Thread): return_exceptions=True) for file_id, info in zip(files_created, files_processed_info): - file, representation, site, collection = info + file, representation, site, project_name = info error = None if isinstance(file_id, BaseException): error = str(file_id) file_id = None - self.module.update_db(collection, + self.module.update_db(project_name, file_id, file, representation, @@ -399,7 +400,7 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(collection) + delay = self.module.get_loop_delay(project_name) log.debug("Waiting for {} seconds to new loop".format(delay)) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) @@ -458,19 +459,19 @@ class SyncServerThread(threading.Thread): self.timer.cancel() self.timer = None - def _working_sites(self, collection): - if self.module.is_project_paused(collection): + def _working_sites(self, project_name): + if self.module.is_project_paused(project_name): log.debug("Both sites same, skipping") return None, None - local_site = self.module.get_active_site(collection) - remote_site = self.module.get_remote_site(collection) + local_site = self.module.get_active_site(project_name) + remote_site = self.module.get_remote_site(project_name) if local_site == remote_site: log.debug("{}-{} sites same, skipping".format(local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, collection) + configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): log.debug("Some of the sites {} - {} is not ".format(local_site, diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 71e35c7839..c4d90416bb 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -130,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.projects_processed = set() """ Start of Public API """ - def add_site(self, collection, representation_id, site_name=None, + def add_site(self, project_name, representation_id, site_name=None, force=False): """ Adds new site to representation to be synced. 
- 'collection' must have synchronization enabled (globally or + 'project_name' must have synchronization enabled (globally or project only) Used as a API endpoint from outside applications (Loader etc). @@ -143,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to reset existing site. Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists @@ -153,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") if not site_name: site_name = self.DEFAULT_SITE - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, force=force) - def remove_site(self, collection, representation_id, site_name, + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ Removes 'site_name' for particular 'representation_id' on - 'collection' + 'project_name' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site remove_local_files (bool): remove only files for 'local_id' @@ -180,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: throws ValueError if any issue """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, remove=True) if remove_local_files: - self._remove_local_file(collection, representation_id, site_name) + self._remove_local_file(project_name, representation_id, site_name) def compute_resource_sync_sites(self, project_name): """Get available resource sync sites state for publish process. @@ -335,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return alt_site_pairs - def clear_project(self, collection, site_name): + def clear_project(self, project_name, site_name): """ - Clear 'collection' of 'site_name' and its local files + Clear 'project_name' of 'site_name' and its local files Works only on real local sites, not on 'studio' """ @@ -348,15 +348,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # TODO currently not possible to replace with get_representations representations = list( - self.connection.database[collection].find(query)) + self.connection.database[project_name].find(query)) if not representations: self.log.debug("No repre found") return for repre in representations: - self.remove_site(collection, repre.get("_id"), site_name, True) + self.remove_site(project_name, repre.get("_id"), site_name, True) - def create_validate_project_task(self, collection, site_name): + def create_validate_project_task(self, project_name, site_name): """Adds metadata about project files validation on a queue. 
This process will loop through all representation and check if @@ -373,28 +373,28 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ task = { "type": "validate", - "project_name": collection, - "func": lambda: self.validate_project(collection, site_name, + "project_name": project_name, + "func": lambda: self.validate_project(project_name, site_name, reset_missing=True) } - self.projects_processed.add(collection) + self.projects_processed.add(project_name) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, reset_missing=False): - """Validate 'collection' of 'site_name' and its local files + def validate_project(self, project_name, site_name, reset_missing=False): + """Validate 'project_name' of 'site_name' and its local files If file present and not marked with a 'site_name' in DB, DB is updated with site name and file modified date. Args: - collection (string): project name + project_name (string): project name site_name (string): active site name reset_missing (bool): if True reset site in DB if missing physically """ - self.log.debug("Validation of {} for {} started".format(collection, + self.log.debug("Validation of {} for {} started".format(project_name, site_name)) - representations = list(get_representations(collection)) + representations = list(get_representations(project_name)) if not representations: self.log.debug("No repre found") return @@ -414,7 +414,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): continue file_path = repre_file.get("path", "") - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file_path) @@ -426,14 +426,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "Adding site {} for {}".format(site_name, repre_id)) - query = { - "_id": repre_id - } created_dt = datetime.fromtimestamp( os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, repre, elem, + self._add_site(project_name, repre, elem, site_name=site_name, file_id=repre_file["_id"], force=True) @@ -443,41 +440,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.log.debug("Resetting site {} for {}". format(site_name, repre_id)) self.reset_site_on_representation( - collection, repre_id, site_name=site_name, + project_name, repre_id, site_name=site_name, file_id=repre_file["_id"]) sites_reset += 1 if sites_added % 100 == 0: self.log.debug("Sites added {}".format(sites_added)) - self.log.debug("Validation of {} for {} ended".format(collection, + self.log.debug("Validation of {} for {} ended".format(project_name, site_name)) self.log.info("Sites added {}, sites reset {}".format(sites_added, reset_missing)) - def pause_representation(self, collection, representation_id, site_name): + def pause_representation(self, project_name, representation_id, site_name): """ Sets 'representation_id' as paused, eg. no syncing should be happening on it. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. 
""" log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) - def unpause_representation(self, collection, representation_id, site_name): + def unpause_representation(self, project_name, + representation_id, site_name): """ Sets 'representation_id' as unpaused. Does not fail or warn if repre wasn't paused. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ @@ -487,7 +485,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): except KeyError: pass # self.paused_representations is not persistent - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=False) def is_representation_paused(self, representation_id, @@ -518,7 +516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): happening on all representation inside. Args: - project_name (string): collection name + project_name (string): project_name name """ log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) @@ -530,7 +528,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Does not fail or warn if project wasn't paused. Args: - project_name (string): collection name + project_name (string): """ log.info("Unpausing SyncServer for {}".format(project_name)) try: @@ -543,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns if 'project_name' is paused or not. Args: - project_name (string): collection name + project_name (string): check_parents (bool): check if server itself is not paused Returns: @@ -942,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return True return False - def handle_alternate_site(self, collection, representation, processed_site, - file_id, synced_file_id): + def handle_alternate_site(self, project_name, representation, + processed_site, file_id, synced_file_id): """ For special use cases where one site vendors another. @@ -956,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): same location >> file is accesible on 'sftp' site right away. 
Args: - collection (str): name of project + project_name (str): name of project representation (dict) processed_site (str): real site_name of published/uploaded file file_id (ObjectId): DB id of file handled @@ -980,26 +978,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule): alternate_sites = set(alternate_sites) for alt_site in alternate_sites: - query = { - "_id": representation["_id"] - } elem = {"name": alt_site, "created_dt": datetime.now(), "id": synced_file_id} self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) - self._add_site(collection, query, + self._add_site(project_name, representation, elem, alt_site, file_id=file_id, force=True) """ End of Public API """ - def get_local_file_path(self, collection, site_name, file_path): + def get_local_file_path(self, project_name, site_name, file_path): """ Externalized for app """ - handler = LocalDriveHandler(collection, site_name) + handler = LocalDriveHandler(project_name, site_name) local_file_path = handler.resolve_path(file_path) return local_file_path @@ -1286,7 +1281,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites.get(site, 'N/A') @time_function - def get_sync_representations(self, collection, active_site, remote_site): + def get_sync_representations(self, project_name, active_site, remote_site): """ Get representations that should be synced, these could be recognised by presence of document in 'files.sites', where key is @@ -1297,8 +1292,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): better performance. Goal is to get as few representations as possible. Args: - collection (string): name of collection (in most cases matches - project name + project_name (string): active_site (string): identifier of current active site (could be 'local_0' when working from home, 'studio' when working in the studio (default) @@ -1307,10 +1301,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(collection)) - self.connection.Session["AVALON_PROJECT"] = collection + log.debug("Check representations for : {}".format(project_name)) + self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up - retries_arr = self._get_retries_arr(collection) + retries_arr = self._get_retries_arr(project_name) match = { "type": "representation", "$or": [ @@ -1447,14 +1441,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return SyncStatus.DO_NOTHING - def update_db(self, collection, new_file_id, file, representation, + def update_db(self, project_name, new_file_id, file, representation, site, error=None, progress=None, priority=None): """ Update 'provider' portion of records in DB with success (file_id) or error (exception) Args: - collection (string): name of project - force to db connection as + project_name (string): name of project - force to db connection as each file might come from different collection new_file_id (string): file (dictionary): info about processed file (pulled from DB) @@ -1497,7 +1491,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if file_id: arr_filter.append({'f._id': ObjectId(file_id)}) - self.connection.database[collection].update_one( + self.connection.database[project_name].update_one( query, update, upsert=True, @@ -1560,7 +1554,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return -1, None - def reset_site_on_representation(self, collection, representation_id, + def 
reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, remove=False, pause=None, force=False): """ @@ -1577,7 +1571,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Should be used when repre should be synced to new site. Args: - collection (string): name of project (eg. collection) in DB + project_name (string): name of project (eg. collection) in DB representation_id(string): _id of representation file_id (string): file _id in representation side (string): local or remote side @@ -1591,18 +1585,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - representation = get_representation_by_id(collection, + representation = get_representation_by_id(project_name, representation_id) if not representation: raise ValueError("Representation {} not found in {}". - format(representation_id, collection)) + format(representation_id, project_name)) if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") - local_site = self.get_active_site(collection) - remote_site = self.get_remote_site(collection) + local_site = self.get_active_site(project_name) + remote_site = self.get_remote_site(project_name) if side: if side == 'local': @@ -1612,42 +1606,44 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} - query = { - "_id": ObjectId(representation_id) - } - if file_id: # reset site for particular file - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) elif side: # reset site for whole representation - self._reset_site(collection, query, elem, site_name) + self._reset_site(project_name, representation_id, elem, site_name) elif remove: # remove site for whole representation - self._remove_site(collection, query, representation, site_name) + self._remove_site(project_name, + representation, site_name) elif pause is not None: - self._pause_unpause_site(collection, query, + self._pause_unpause_site(project_name, representation, site_name, pause) else: # add new site to all files for representation - self._add_site(collection, query, representation, elem, site_name, + self._add_site(project_name, representation, elem, site_name, force=force) - def _update_site(self, collection, query, update, arr_filter): + def _update_site(self, project_name, representation_id, + update, arr_filter): """ Auxiliary method to call update_one function on DB Used for refactoring ugly reset_provider_for_file """ - self.connection.database[collection].update_one( + query = { + "_id": ObjectId(representation_id) + } + + self.connection.database[project_name].update_one( query, update, upsert=True, array_filters=arr_filter ) - def _reset_site_for_file(self, collection, query, + def _reset_site_for_file(self, project_name, representation_id, elem, file_id, site_name): """ Resets 'site_name' for 'file_id' on representation in 'query' on - 'collection' + 'project_name' """ update = { "$set": {"files.$[f].sites.$[s]": elem} @@ -1660,9 +1656,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _reset_site(self, collection, query, elem, site_name): + def _reset_site(self, project_name, representation_id, elem, site_name): """ Resets 'site_name' for all files 
of representation in 'query' """ @@ -1674,9 +1670,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _remove_site(self, collection, query, representation, site_name): + def _remove_site(self, project_name, representation, site_name): """ Removes 'site_name' for 'representation' in 'query' @@ -1698,10 +1694,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): } arr_filter = [] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _pause_unpause_site(self, collection, query, - representation, site_name, pause): + def _pause_unpause_site(self, project_name, representation, + site_name, pause): """ Pauses/unpauses all files for 'representation' based on 'pause' @@ -1733,12 +1730,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _add_site(self, collection, query, representation, elem, site_name, + def _add_site(self, project_name, representation, elem, site_name, force=False, file_id=None): """ - Adds 'site_name' to 'representation' on 'collection' + Adds 'site_name' to 'representation' on 'project_name' Args: representation (dict) @@ -1746,10 +1744,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to remove existing or raises ValueError """ + representation_id = representation["_id"] reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation["_id"])) + log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1759,7 +1758,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: if force or site.get("error"): - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, + representation_id, elem, repre_file["_id"], site_name) reset_existing = True @@ -1785,14 +1785,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, + update, arr_filter) - def _remove_local_file(self, collection, representation_id, site_name): + def _remove_local_file(self, project_name, representation_id, site_name): """ Removes all local files for 'site_name' of 'representation_id' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site @@ -1808,7 +1809,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - representation = get_representation_by_id(collection, + representation = get_representation_by_id(project_name, representation_id, fields=["files"]) if not representation: @@ -1818,7 +1819,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): local_file_path = '' for file in representation.get("files"): - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file.get("path", "") ) diff --git 
a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index a97797c920..f05a5bd8ea 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -441,7 +441,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): full text filtering. Allows pagination, most of heavy lifting is being done on DB side. - Single model matches to single collection. When project is changed, + Single model matches to single project. When project is changed, model is reset and refreshed. Args: From 2a0e377ff4288a47efa184e51dd64a5158eeee62 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 20:08:37 +0800 Subject: [PATCH 659/785] introduce a condition to exclude the unneccessary node attributes during collecting looks --- openpype/hosts/maya/plugins/publish/collect_look.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index ec583bcce7..4a14fc4451 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -551,7 +551,11 @@ class CollectLook(pyblish.api.InstancePlugin): if cmds.getAttr(attribute, type=True) == "message": continue node_attributes[attr] = cmds.getAttr(attribute) - + + # Only include if there are any properties we care about + if not node_attributes: + continue + attributes.append({"name": node, "uuid": lib.get_id(node), "attributes": node_attributes}) From 7f356587e38051dfb2ffb515af896a5bd916105c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 2 Aug 2022 20:09:58 +0800 Subject: [PATCH 660/785] introduce a condition to exclude the unneccessary node attributes during collecting looks --- openpype/hosts/maya/plugins/publish/collect_look.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 4a14fc4451..157be5717b 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -551,11 +551,9 @@ class CollectLook(pyblish.api.InstancePlugin): if cmds.getAttr(attribute, type=True) == "message": continue node_attributes[attr] = cmds.getAttr(attribute) - # Only include if there are any properties we care about if not node_attributes: continue - attributes.append({"name": node, "uuid": lib.get_id(node), "attributes": node_attributes}) From eb2c82558888fe5650bdab4bee1a60a498b685fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 16:09:59 +0200 Subject: [PATCH 661/785] OP-3405 - extracted aggregate query from Loader to Site Sync module --- .../modules/sync_server/sync_server_module.py | 89 +++++++++++++++++ openpype/tools/loader/model.py | 95 ++----------------- 2 files changed, 98 insertions(+), 86 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index c4d90416bb..8fdfab9c2e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -988,6 +988,95 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, elem, alt_site, file_id=file_id, force=True) + def get_repre_info_for_versions(self, project_name, version_ids, + active_site, remote_site): + """Returns representation documents for versions and sites combi + + Args: + project_name (str) + version_ids (list): of 
version[_id] + active_site (string): 'local', 'studio' etc + remote_site (string): dtto + Returns: + + """ + self.connection.Session["AVALON_PROJECT"] = project_name + query = [ + {"$match": {"parent": {"$in": version_ids}, + "type": "representation", + "files.sites.name": {"$exists": 1}}}, + {"$unwind": "$files"}, + {'$addFields': { + 'order_local': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', active_site]} + } + } + }}, + {'$addFields': { + 'order_remote': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', remote_site]} + } + } + }}, + {'$addFields': { + 'progress_local': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_local.progress"}, + "$order_local.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$addFields': { + 'progress_remote': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_remote.progress"}, + "$order_remote.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$group': { # first group by repre + '_id': '$_id', + 'parent': {'$first': '$parent'}, + 'avail_ratio_local': { + '$first': { + '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] + } + }, + 'avail_ratio_remote': { + '$first': { + '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] + } + } + }}, + {'$group': { # second group by parent, eg version_id + '_id': '$parent', + 'repre_count': {'$sum': 1}, # total representations + # fully available representation for site + 'avail_repre_local': {'$sum': "$avail_ratio_local"}, + 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, + }}, + ] + # docs = list(self.connection.aggregate(query)) + return self.connection.aggregate(query) + """ End of Public API """ def get_local_file_path(self, project_name, site_name, file_path): diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index a5174bd804..3ce44ea6c8 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -272,15 +272,15 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - query = self._repre_per_version_pipeline( + repre_info = self.sync_server.get_repre_info_for_versions( + project_name, [version_doc["_id"]], self.active_site, self.remote_site ) - docs = list(self.dbcon.aggregate(query)) - if docs: - repre = docs.pop() - version_doc["data"].update(self._get_repre_dict(repre)) + if repre_info: + version_doc["data"].update( + self._get_repre_dict(repre_info[0])) self.set_version(index, version_doc) @@ -478,16 +478,16 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): for _subset_id, doc in last_versions_by_subset_id.items(): version_ids.add(doc["_id"]) - query = self._repre_per_version_pipeline( + repres = self.sync_server.get_repre_info_for_versions( + project_name, list(version_ids), self.active_site, self.remote_site ) - - for doc in self.dbcon.aggregate(query): + for repre in repres: if self._doc_fetching_stop: return doc["active_provider"] = self.active_provider doc["remote_provider"] = self.remote_provider - repre_info[doc["_id"]] = doc + repre_info[repre["_id"]] = repre self._doc_payload = { "asset_docs_by_id": asset_docs_by_id, @@ -827,83 +827,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return data - def _repre_per_version_pipeline(self, 
version_ids, - active_site, remote_site): - query = [ - {"$match": {"parent": {"$in": version_ids}, - "type": "representation", - "files.sites.name": {"$exists": 1}}}, - {"$unwind": "$files"}, - {'$addFields': { - 'order_local': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', active_site]} - } - } - }}, - {'$addFields': { - 'order_remote': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', remote_site]} - } - } - }}, - {'$addFields': { - 'progress_local': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_local.progress"}, - "$order_local.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_local.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$addFields': { - 'progress_remote': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_remote.progress"}, - "$order_remote.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_remote.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$group': { # first group by repre - '_id': '$_id', - 'parent': {'$first': '$parent'}, - 'avail_ratio_local': { - '$first': { - '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] - } - }, - 'avail_ratio_remote': { - '$first': { - '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] - } - } - }}, - {'$group': { # second group by parent, eg version_id - '_id': '$parent', - 'repre_count': {'$sum': 1}, # total representations - # fully available representation for site - 'avail_repre_local': {'$sum': "$avail_ratio_local"}, - 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, - }}, - ] - return query - class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel): """Provide the feature of filtering group by the acceptance of members From 26c4a0f8ca19eeb4faaa85ceac1524c3bed71b7d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 16:15:17 +0200 Subject: [PATCH 662/785] OP-3405 - Hound --- openpype/modules/sync_server/providers/local_drive.py | 3 ++- openpype/modules/sync_server/sync_server.py | 9 +++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 4951ef4d1a..01bc891d08 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -111,7 +111,8 @@ class LocalDriveHandler(AbstractProvider): Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, project_name, file, representation, site, + server, project_name, file, + representation, site, overwrite, direction="Download") def delete_file(self, path): diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 9cc55ec562..97538fcd4e 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -54,8 +54,9 @@ async def upload(module, project_name, file, representation, provider_name, file_path = file.get("path", "") try: - local_file_path, remote_file_path = resolve_paths(module, - file_path, project_name, remote_site_name, remote_handler + local_file_path, remote_file_path = resolve_paths( + module, file_path, project_name, + remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -270,8 +271,8 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has 
configuration, credentials) - - for each project_name it looks for representations that should - be synced + - for each project_name it looks for representations that + should be synced - synchronize found collections - update representations - fills error messages for exceptions - waits X seconds and repeat From 80b6ef981a5bc43bf2f2eea5ce06895057472a9a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 18:13:39 +0200 Subject: [PATCH 663/785] OP-3684 - fix for new publisher New publisher expects frames in file names in '.0000.' format, AE by default provides ('_0000.'). Locally rendered files need to be renamed to appropriate format. --- .../plugins/publish/extract_local_render.py | 38 +++++++++++++++++-- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index 7323a0b125..67a89ba9df 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -1,7 +1,8 @@ import os import sys import six - +import re +import shutil import openpype.api from openpype.hosts.aftereffects.api import get_stub @@ -22,15 +23,26 @@ class ExtractLocalRender(openpype.api.Extractor): # pull file name from Render Queue Output module render_q = stub.get_render_info() stub.render(staging_dir) + render_q_file_name = render_q.file_name if not render_q: raise ValueError("No file extension set in Render Queue") - _, ext = os.path.splitext(os.path.basename(render_q.file_name)) + _, ext = os.path.splitext(os.path.basename(render_q_file_name)) ext = ext[1:] + replace_frames_format = self._get_replace_format(render_q_file_name) + first_file_path = None files = [] - self.log.info("files::{}".format(os.listdir(staging_dir))) for file_name in os.listdir(staging_dir): + _, found_ext = os.path.splitext(file_name) + if found_ext[1:] != ext: + continue + + if replace_frames_format: + file_name = self._translate_frames(file_name, + replace_frames_format, + staging_dir) + files.append(file_name) if first_file_path is None: first_file_path = os.path.join(staging_dir, @@ -78,3 +90,23 @@ class ExtractLocalRender(openpype.api.Extractor): "stagingDir": staging_dir, "tags": ["thumbnail"] }) + + def _translate_frames(self, file_name, replace_frames_format, staging_dir): + orig_file_name = file_name + + found_frames = re.search(replace_frames_format, file_name) + if found_frames: + new_frames = found_frames.group(0).replace('_', '.') + file_name = file_name.replace(found_frames.group(0), new_frames) + shutil.move(os.path.join(staging_dir, orig_file_name), + os.path.join(staging_dir, file_name)) + + return file_name + + def _get_replace_format(self, file_name): + # replace delimiter for frames to one integrate is expecting (.0000.) 
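# Reviewer note as a worked example (not part of the patch), assuming the
# render-queue file name arrives URL-encoded, e.g. "render_%5B####%5D.png":
#   _get_replace_format("render_%5B####%5D.png")  ->  "_[0-9]{4}."
#   _translate_frames("render_0001.png", "_[0-9]{4}.", staging_dir)
# renames the file on disk and returns "render.0001.png", matching the
# ".0000." frame convention the new publisher expects.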
+ # returns frame format to be replaced + hashes_found = re.search(r"(_%5B[#]*%5D.)", file_name) + if hashes_found: + hashes = re.sub("[^#]", '', hashes_found.group(0)) + return "_[0-9]{{{0}}}.".format(len(hashes)) From a605cba4b99d056e1f797c426482292b34c31415 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 3 Aug 2022 04:07:35 +0000 Subject: [PATCH 664/785] [Automated] Bump version --- CHANGELOG.md | 24 ++++++++++-------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eab4e5e45e..2c9671c8b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.12.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) @@ -11,21 +11,27 @@ **🚀 Enhancements** - Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) -- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) +- Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** +- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) +- Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. [\#3590](https://github.com/pypeclub/OpenPype/pull/3590) - Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) -- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** +- General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) - General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) +**Merged pull requests:** + +- Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) + ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) @@ -54,6 +60,7 @@ - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) - Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) @@ -66,7 +73,6 @@ - TrayPublisher: Simple creation enhancements and fixes 
[\#3513](https://github.com/pypeclub/OpenPype/pull/3513) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) -- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) **🔀 Refactored code** @@ -77,7 +83,6 @@ - General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) - General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) - Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) -- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) **Merged pull requests:** @@ -94,7 +99,6 @@ - NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) - General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) - General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) -- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) **🐛 Bug fixes** @@ -105,14 +109,6 @@ - New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) - General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) - Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) -- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) -- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) -- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) - -**🔀 Refactored code** - -- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) -- Maya: Re-use `maintained\_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) diff --git a/openpype/version.py b/openpype/version.py index 03fd5fb96e..636dff5930 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.3-nightly.1" +__version__ = "3.12.3-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 118355395a..9ab2fd4513 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.3-nightly.1" # OpenPype +version = "3.12.3-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 0761ba4bc3b029cc5a130f3cde5dcedebefa0d7a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 13:34:24 +0200 Subject: [PATCH 665/785] OP-3684 - fix output compare for automatic testing --- tests/lib/testing_classes.py | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index f991f02227..aa366cd005 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -314,30 +314,21 @@ class PublishTest(ModuleUnitTest): Compares only presence, not size nor content! """ - published_dir_base = download_test_data - published_dir = os.path.join(output_folder_url, - self.PROJECT, - self.ASSET, - self.TASK, - "**") - expected_dir_base = os.path.join(published_dir_base, + published_dir_base = output_folder_url + expected_dir_base = os.path.join(download_test_data, "expected") - expected_dir = os.path.join(expected_dir_base, - self.PROJECT, - self.ASSET, - self.TASK, - "**") - print("Comparing published:'{}' : expected:'{}'".format(published_dir, - expected_dir)) + + print("Comparing published:'{}' : expected:'{}'".format(published_dir_base, + expected_dir_base)) published = set(f.replace(published_dir_base, '') for f in - glob.glob(published_dir, recursive=True) if + glob.glob(published_dir_base + "\\**", recursive=True) if f != published_dir_base and os.path.exists(f)) expected = set(f.replace(expected_dir_base, '') for f in - glob.glob(expected_dir, recursive=True) if + glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.difference(published) - assert not not_matched, "Missing {} files".format(not_matched) + not_matched = expected.symmetric_difference(published) + assert not not_matched, "Missing {} files".format("\n".join(sorted(not_matched))) class HostFixtures(PublishTest): From 67b9946f2057fb44133edfd2fa70fddfe9ce2de3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 13:36:20 +0200 Subject: [PATCH 666/785] OP-3684 - added new testing class for multiframe AE publish Previous test published only single frame, didn't catch issue in new integrate. 
--- ...test_publish_in_aftereffects_multiframe.py | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py new file mode 100644 index 0000000000..c882e0f9b2 --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -0,0 +1,64 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestPublishInAfterEffects(AfterEffectsTestClass): + """Basic test case for publishing in AfterEffects + + Should publish 5 frames + """ + PERSIST = True + + TEST_FILES = [ + ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", + "test_aftereffects_publish_multiframe.zip", + "") + ] + + APP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackgroundcopy")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="reviewTesttask")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTestTaskDefault", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() From 9ed329aebe6e114d47195e6dc456898569e0d404 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 3 Aug 2022 14:26:05 +0200 Subject: [PATCH 667/785] :bug: filter out non-build versions and fixing the error message --- .../custom/plugins/GlobalJobPreLoad.py | 18 ++++++++++++++++-- .../custom/plugins/OpenPype/OpenPype.py | 18 ++++++++++++++++-- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index a43c6c7733..5e923eb09a 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -10,10 +10,23 @@ import re from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils -def get_openpype_version_from_path(path): +def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. 
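# Reviewer sketch, not part of the patch: the same build test appears twice
# (here and in OpenPype.py below). Assuming the intent is "treat a directory
# as a build only when it ships an openpype_console launcher", a shared
# helper could look like this; the helper name is hypothetical.
import os

def is_openpype_build(path):
    # A build contains the frozen launcher; a plain source checkout does not.
    return any(
        os.path.isfile(os.path.join(path, name))
        for name in ("openpype_console", "openpype_console.exe")
    )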
+ + """ version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build + if not build and \ + (not os.path.isfile(os.path.join(path, "openpype_console")) or + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + return None version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -101,7 +114,8 @@ def inject_openpype_environment(deadlinePlugin): if exe == "": raise RuntimeError( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exe_list + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". " + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index b84560f175..764dc4c4ba 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -61,10 +61,23 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress @staticmethod - def get_openpype_version_from_path(path): + def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. + + """ version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build + if not build and \ + (not os.path.isfile(os.path.join(path, "openpype_console")) or + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + return None version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -136,7 +149,8 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if exe == "": self.FailRender( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exe_list + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". " + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") return exe From ef60744d9c1aedea3f442792733d844ed0d845b7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 14:58:36 +0200 Subject: [PATCH 668/785] OP-3684 - added default to Integrate Setting to skip render.farm New publisher requires main family as 'render', so there will be need to skip 'render.farm' which should not be integrated during initial publish. (Currently only affecting AE.) --- openpype/plugins/publish/integrate.py | 11 ++++++----- .../settings/defaults/project_settings/global.json | 11 ++++++++++- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index d817595888..70ab9f611e 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -167,7 +167,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): skip_host_families = [] def process(self, instance): - if self._temp_skip_instance_by_settings(instance): + if self.skip_instance_by_settings(instance): return # Mark instance as processed for legacy integrator @@ -203,11 +203,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the try, except. 
file_transactions.finalize() - def _temp_skip_instance_by_settings(self, instance): - """Decide if instance will be processed with new or legacy integrator. + def skip_instance_by_settings(self, instance): + """Decide if instance will be processed with new integrator. - This is temporary solution until we test all usecases with new (this) - integrator plugin. + This might be temporary solution for broken publishing for any families + (therefore it should fallback into legacy publish plugin) OR this + could replace 'exclude_families' in legacy plugin (host is required). """ host_name = instance.context.data["hostName"] diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index e509db2791..d349066924 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -225,7 +225,16 @@ ] }, "IntegrateAsset": { - "skip_host_families": [] + "skip_host_families": [ + { + "host": [ + "aftereffects" + ], + "families": [ + "render.farm" + ] + } + ] }, "IntegrateHeroVersion": { "enabled": true, From d0ac6bc9b0b55cbe4897d9ba129412316202d6eb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 15:08:13 +0200 Subject: [PATCH 669/785] OP-3684 - Hound --- tests/lib/testing_classes.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index aa366cd005..2b4d7deb48 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -318,17 +318,18 @@ class PublishTest(ModuleUnitTest): expected_dir_base = os.path.join(download_test_data, "expected") - print("Comparing published:'{}' : expected:'{}'".format(published_dir_base, - expected_dir_base)) + print("Comparing published:'{}' : expected:'{}'".format( + published_dir_base, expected_dir_base)) published = set(f.replace(published_dir_base, '') for f in - glob.glob(published_dir_base + "\\**", recursive=True) if - f != published_dir_base and os.path.exists(f)) + glob.glob(published_dir_base + "\\**", recursive=True) + if f != published_dir_base and os.path.exists(f)) expected = set(f.replace(expected_dir_base, '') for f in - glob.glob(expected_dir_base + "\\**", recursive=True) if - f != expected_dir_base and os.path.exists(f)) + glob.glob(expected_dir_base + "\\**", recursive=True) + if f != expected_dir_base and os.path.exists(f)) not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format("\n".join(sorted(not_matched))) + assert not not_matched, "Missing {} files".format( + "\n".join(sorted(not_matched))) class HostFixtures(PublishTest): From fec91f054d6766ca4d597faa225ab1d783515026 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 15:33:38 +0200 Subject: [PATCH 670/785] don't force to have dot before frame in new integrator --- openpype/plugins/publish/integrate.py | 70 ++++++++------------------- 1 file changed, 19 insertions(+), 51 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index d817595888..f7f5ca2aeb 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -23,41 +23,6 @@ from openpype.pipeline.publish import KnownPublishError log = logging.getLogger(__name__) -def assemble(files): - """Convenience `clique.assemble` wrapper for files of a single collection. 
- - Unlike `clique.assemble` this wrapper does not allow more than a single - Collection nor any remainder files. Errors will be raised when not only - a single collection is assembled. - - Returns: - clique.Collection: A single sequence Collection - - Raises: - ValueError: Error is raised when files do not result in a single - collected Collection. - - """ - # todo: move this to lib? - # Get the sequence as a collection. The files must be of a single - # sequence and have no remainder outside of the collections. - patterns = [clique.PATTERNS["frames"]] - collections, remainder = clique.assemble(files, - minimum_items=1, - patterns=patterns) - if not collections: - raise ValueError("No collections found in files: " - "{}".format(files)) - if remainder: - raise ValueError("Files found not detected as part" - " of a sequence: {}".format(remainder)) - if len(collections) > 1: - raise ValueError("Files in sequence are not part of a" - " single sequence collection: " - "{}".format(collections)) - return collections[0] - - def get_instance_families(instance): """Get all families of the instance""" # todo: move this to lib? @@ -576,7 +541,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if any(os.path.isabs(fname) for fname in files): raise KnownPublishError("Given file names contain full paths") - src_collection = assemble(files) + src_collection = clique.assemble(files) destination_indexes = list(src_collection.indexes) # Use last frame for minimum padding @@ -609,31 +574,34 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # a Frame or UDIM tile set for the template data. We use the first # index of the destination for that because that could've shifted # from the source indexes, etc. - first_index_padded = get_frame_padded(frame=destination_indexes[0], - padding=destination_padding) - if is_udim: - # UDIM representations handle ranges in a different manner - template_data["udim"] = first_index_padded - else: - template_data["frame"] = first_index_padded + first_index_padded = get_frame_padded( + frame=destination_indexes[0], + padding=destination_padding + ) # Construct destination collection from template - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled[template_name]["path"] - repre_context = template_filled.used_values + repre_context = None + dst_filepaths = [] + for index in destination_indexes: + if is_udim: + template_data["udim"] = index + else: + template_data["frame"] = index + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + dst_filepaths.append(template_filled) + if repre_context is None: + repre_context = template_filled.used_value + self.log.debug("Template filled: {}".format(str(template_filled))) # Make sure context contains frame # NOTE: Frame would not be available only if template does not # contain '{frame}' in template -> Do we want support it? 
if not is_udim: repre_context["frame"] = first_index_padded - self.log.debug("Template filled: {}".format(str(template_filled))) - dst_collection = assemble([os.path.normpath(template_filled)]) - # Update the destination indexes and padding - dst_collection.indexes.clear() - dst_collection.indexes.update(set(destination_indexes)) + dst_collection = clique.assemble(dst_filepaths) dst_collection.padding = destination_padding if len(src_collection.indexes) != len(dst_collection.indexes): raise KnownPublishError(( From 573d0a5ae12daddaef84f1b1d5c46f0048a780c6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 16:00:50 +0200 Subject: [PATCH 671/785] add fps to newly created representation --- openpype/plugins/publish/extract_review.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1b6e2a1d61..533a87acb4 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -360,6 +360,7 @@ class ExtractReview(pyblish.api.InstancePlugin): os.unlink(f) new_repre.update({ + "fps": temp_data["fps"], "name": "{}_{}".format(output_name, output_ext), "outputName": output_name, "outputDef": output_def, From bd4ebab60d5ba8bbca96eaea15080d79cc29d5e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 16:03:48 +0200 Subject: [PATCH 672/785] make sure ftrackreview-image is renamed to thumbnail if there is ftrackreview-mp4 to be able play it --- .../plugins/publish/integrate_ftrack_api.py | 43 +++++++++++++++++-- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 58591bacfd..20a69e060c 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -87,6 +87,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): asset_versions_data_by_id = {} used_asset_versions = [] + # Iterate over components and publish for data in component_list: self.log.debug("data: {}".format(data)) @@ -116,9 +117,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): asset_version_status_ids_by_name ) - # Component - self.create_component(session, asset_version_entity, data) - # Store asset version and components items that were version_id = asset_version_entity["id"] if version_id not in asset_versions_data_by_id: @@ -135,6 +133,8 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): if asset_version_entity not in used_asset_versions: used_asset_versions.append(asset_version_entity) + self._create_components(session, asset_versions_data_by_id) + instance.data["ftrackIntegratedAssetVersionsData"] = ( asset_versions_data_by_id ) @@ -623,3 +623,40 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.rollback() session._configure_locations() six.reraise(tp, value, tb) + + def _create_components(self, session, asset_versions_data_by_id): + for item in asset_versions_data_by_id.values(): + asset_version_entity = item["asset_version"] + component_items = item["component_items"] + + component_entities = session.query( + ( + "select id, name from Component where version_id is \"{}\"" + ).format(asset_version_entity["id"]) + ).all() + + existing_component_names = { + component["name"] + for component in component_entities + } + + contain_review = "ftrackreview-mp4" in existing_component_names + thumbnail_component_item = None + for 
component_item in component_items: + component_data = component_item.get("component_data") or {} + component_name = component_data.get("name") + if component_name == "ftrackreview-mp4": + contain_review = True + elif component_name == "ftrackreview-image": + thumbnail_component_item = component_item + + if contain_review and thumbnail_component_item: + thumbnail_component_item["component_data"]["name"] = ( + "thumbnail" + ) + + # Component + for component_item in component_items: + self.create_component( + session, asset_version_entity, component_item + ) From c64925fb665ee3bcb49837dcb2fff7f03a7390f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 3 Aug 2022 16:12:21 +0200 Subject: [PATCH 673/785] :rotating_light: I hate you Hound so much --- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 764dc4c4ba..79101bb90c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -76,7 +76,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): # skip if the version is not build if not build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 return None version = {} with open(version_file, "r") as vf: From 502a8c6ee7f55c7c98dfa1fd2033cf286116f9bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 16:12:52 +0200 Subject: [PATCH 674/785] add more metadata to ftrack components --- .../publish/integrate_ftrack_instances.py | 151 ++++++++++++++---- 1 file changed, 121 insertions(+), 30 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index d937e64790..4c0e5127fa 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -3,7 +3,10 @@ import json import copy import pyblish.api -from openpype.lib import get_ffprobe_streams +from openpype.lib.transcoding import ( + get_ffprobe_streams, + convert_ffprobe_fps_to_float, +) from openpype.lib.profiles_filtering import filter_profiles @@ -79,11 +82,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ).format(family)) return - # Prepare FPS - instance_fps = instance.data.get("fps") - if instance_fps is None: - instance_fps = instance.context.data["fps"] - status_name = self._get_asset_version_status_name(instance) # Base of component item data @@ -168,10 +166,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add item to component list component_list.append(thumbnail_item) - if ( - not review_representations - and first_thumbnail_component is not None - ): + if first_thumbnail_component is not None: width = first_thumbnail_component_repre.get("width") height = first_thumbnail_component_repre.get("height") if not width or not height: @@ -253,20 +248,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component[ "asset_data"]["name"] = extended_asset_name - frame_start = repre.get("frameStartFtrack") - frame_end = repre.get("frameEndFtrack") - if frame_start is 
None or frame_end is None: - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] - - # Frame end of uploaded video file should be duration in frames - # - frame start is always 0 - # - frame end is duration in frames - duration = frame_end - frame_start + 1 - - fps = repre.get("fps") - if fps is None: - fps = instance_fps + component_meta = self._prepare_component_metadata( + instance, repre, repre_path, True + ) # Change location review_item["component_path"] = repre_path @@ -275,11 +259,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Default component name is "main". "name": "ftrackreview-mp4", "metadata": { - "ftr_meta": json.dumps({ - "frameIn": 0, - "frameOut": int(duration), - "frameRate": float(fps) - }) + "ftr_meta": json.dumps(component_meta) } } @@ -339,9 +319,17 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ): other_item["asset_data"]["name"] = extended_asset_name - other_item["component_data"] = { + component_meta = self._prepare_component_metadata( + instance, repre, published_path, False + ) + component_data = { "name": repre["name"] } + if component_meta: + component_data["metadata"] = { + "ftr_meta": json.dumps(component_meta) + } + other_item["component_data"] = component_data other_item["component_location_name"] = unmanaged_location_name other_item["component_path"] = published_path component_list.append(other_item) @@ -424,3 +412,106 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): return None return matching_profile["status"] or None + + def _prepare_component_metadata( + self, instance, repre, component_path, is_review + ): + extension = os.path.splitext(component_path)[-1] + streams = [] + try: + streams = get_ffprobe_streams(component_path) + except Exception: + self.log.debug(( + "Failed to retrieve information about intput {}" + ).format(component_path)) + + # Find video streams + video_streams = [ + stream + for stream in streams + if stream["codec_type"] == "video" + ] + # Skip if there are not video streams + # - exr is special case which can have issues with reading through + # ffmpegh but we want to set fps for it + if not video_streams and extension not in [".exr"]: + return {} + + stream_width = None + stream_height = None + stream_fps = None + frame_out = None + for video_stream in video_streams: + input_framerate = video_stream.get("r_frame_rate") + duration = video_stream.get("duration") + tmp_width = video_stream.get("width") + tmp_height = video_stream.get("height") + if input_framerate is None or duration is None: + if tmp_width and tmp_height: + stream_width = int(tmp_width) + stream_height = int(tmp_height) + continue + try: + stream_fps = convert_ffprobe_fps_to_float( + input_framerate + ) + except ValueError: + self.log.warning(( + "Could not convert ffprobe fps to float \"{}\"" + ).format(input_framerate)) + continue + + stream_width = tmp_width + stream_height = tmp_height + + self.log.debug("FPS from stream is {} and duration is {}".format( + input_framerate, duration + )) + frame_out = float(duration) * stream_fps + break + + # Prepare FPS + instance_fps = instance.data.get("fps") + if instance_fps is None: + instance_fps = instance.context.data["fps"] + + if not is_review: + output = {} + fps = stream_fps or instance_fps + if fps: + output["frameRate"] = fps + + if stream_width and stream_height: + output["width"] = int(stream_width) + output["height"] = int(stream_height) + return output + + frame_start = repre.get("frameStartFtrack") + frame_end = 
repre.get("frameEndFtrack") + if frame_start is None or frame_end is None: + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + + fps = None + repre_fps = repre.get("fps") + if repre_fps is not None: + repre_fps = float(repre_fps) + + fps = stream_fps or repre_fps or instance_fps + + # Frame end of uploaded video file should be duration in frames + # - frame start is always 0 + # - frame end is duration in frames + if not frame_out: + frame_out = frame_end - frame_start + 1 + + # Ftrack documentation says that it is required to have + # 'width' and 'height' in review component. But with those values + # review video does not play. + component_meta = { + "frameIn": 0, + "frameOut": frame_out, + "frameRate": float(fps) + } + + return component_meta From 99469a14438665595fafc21bdc517c083d76bd2c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 16:24:50 +0200 Subject: [PATCH 675/785] OP-3684 - revert - added default to Integrate Setting to skip render.farm" This reverts commit ef60744d Not necessary, better to use `instance.data["farm"]` --- openpype/plugins/publish/integrate.py | 11 +++++------ .../settings/defaults/project_settings/global.json | 11 +---------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 70ab9f611e..d817595888 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -167,7 +167,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): skip_host_families = [] def process(self, instance): - if self.skip_instance_by_settings(instance): + if self._temp_skip_instance_by_settings(instance): return # Mark instance as processed for legacy integrator @@ -203,12 +203,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the try, except. file_transactions.finalize() - def skip_instance_by_settings(self, instance): - """Decide if instance will be processed with new integrator. + def _temp_skip_instance_by_settings(self, instance): + """Decide if instance will be processed with new or legacy integrator. - This might be temporary solution for broken publishing for any families - (therefore it should fallback into legacy publish plugin) OR this - could replace 'exclude_families' in legacy plugin (host is required). + This is temporary solution until we test all usecases with new (this) + integrator plugin. """ host_name = instance.context.data["hostName"] diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index d349066924..e509db2791 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -225,16 +225,7 @@ ] }, "IntegrateAsset": { - "skip_host_families": [ - { - "host": [ - "aftereffects" - ], - "families": [ - "render.farm" - ] - } - ] + "skip_host_families": [] }, "IntegrateHeroVersion": { "enabled": true, From bab5629e35736d94c01c012b0e5aee79fe95ba71 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 16:26:46 +0200 Subject: [PATCH 676/785] OP-3684 - use instance.data["farm"] to skip local integrate No Settings necessary, instance itself should hold if it is targetted for farm (eg. not locally integrated.) 
--- .../hosts/aftereffects/plugins/publish/collect_render.py | 5 +++-- openpype/pipeline/publish/abstract_collect_render.py | 2 ++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index bb199a61f7..d444ead6dc 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -102,7 +102,6 @@ class CollectAERender(publish.AbstractCollectRender): attachTo=False, setMembers='', publish=True, - renderer='aerender', name=subset_name, resolutionWidth=render_q.width, resolutionHeight=render_q.height, @@ -113,7 +112,6 @@ class CollectAERender(publish.AbstractCollectRender): frameStart=frame_start, frameEnd=frame_end, frameStep=1, - toBeRenderedOn='deadline', fps=fps, app_version=app_version, publish_attributes=inst.data.get("publish_attributes", {}), @@ -138,6 +136,9 @@ class CollectAERender(publish.AbstractCollectRender): fam = "render.farm" if fam not in instance.families: instance.families.append(fam) + instance.toBeRenderedOn = "deadline" + instance.renderer = "aerender" + instance.farm = True # to skip integrate instances.append(instance) instances_to_remove.append(inst) diff --git a/openpype/pipeline/publish/abstract_collect_render.py b/openpype/pipeline/publish/abstract_collect_render.py index 2e537227c3..ccb2415346 100644 --- a/openpype/pipeline/publish/abstract_collect_render.py +++ b/openpype/pipeline/publish/abstract_collect_render.py @@ -63,6 +63,8 @@ class RenderInstance(object): family = attr.ib(default="renderlayer") families = attr.ib(default=["renderlayer"]) # list of families + # True if should be rendered on farm, eg not integrate + farm = attr.ib(default=False) # format settings multipartExr = attr.ib(default=False) # flag for multipart exrs From e4c1c204d19ab5c97751c33428927d85771eacc3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 16:33:17 +0200 Subject: [PATCH 677/785] add metada also for src components --- .../publish/integrate_ftrack_instances.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 4c0e5127fa..a1e5922730 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -302,6 +302,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): component_data = copy_src_item["component_data"] component_name = component_data["name"] component_data["name"] = component_name + "_src" + component_meta = self._prepare_component_metadata( + instance, repre, copy_src_item["component_path"], False + ) + if component_meta: + component_data["metadata"] = { + "ftr_meta": json.dumps(component_meta) + } component_list.append(copy_src_item) # Add others representations as component @@ -442,14 +449,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): stream_fps = None frame_out = None for video_stream in video_streams: - input_framerate = video_stream.get("r_frame_rate") - duration = video_stream.get("duration") tmp_width = video_stream.get("width") tmp_height = video_stream.get("height") + if tmp_width and tmp_height: + stream_width = tmp_width + stream_height = tmp_height + + input_framerate = video_stream.get("r_frame_rate") + duration = 
video_stream.get("duration") if input_framerate is None or duration is None: - if tmp_width and tmp_height: - stream_width = int(tmp_width) - stream_height = int(tmp_height) continue try: stream_fps = convert_ffprobe_fps_to_float( From 59463a345784eda01a5ce9f158dd3d1ffb9a821d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 16:54:53 +0200 Subject: [PATCH 678/785] add new function to determine fps value --- openpype/lib/__init__.py | 2 ++ openpype/lib/transcoding.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 31cd5e7510..3d3e425a86 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -115,6 +115,7 @@ from .transcoding import ( get_ffmpeg_codec_args, get_ffmpeg_format_args, convert_ffprobe_fps_value, + convert_ffprobe_fps_to_float, ) from .avalon_context import ( CURRENT_DOC_SCHEMAS, @@ -287,6 +288,7 @@ __all__ = [ "get_ffmpeg_codec_args", "get_ffmpeg_format_args", "convert_ffprobe_fps_value", + "convert_ffprobe_fps_to_float", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index ee9a0f08de..60d5d3ed4a 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -938,3 +938,40 @@ def convert_ffprobe_fps_value(str_value): fps = int(fps) return str(fps) + + +def convert_ffprobe_fps_to_float(value): + """Convert string value of frame rate to float. + + Copy of 'convert_ffprobe_fps_value' which raises exceptions on invalid + value, does not convert value to string and does not return "Unknown" + string. + + Args: + value (str): Value to be converted. + + Returns: + Float: Converted frame rate in float. If divisor in value is '0' then + '0.0' is returned. + + Raises: + ValueError: Passed value is invalid for conversion. + """ + + if not value: + raise ValueError("Got empty value.") + + items = value.split("/") + if len(items) == 1: + return float(items[0]) + + if len(items) > 2: + raise ValueError(( + "FPS expression contains multiple dividers \"{}\"." + ).format(value)) + + dividend = float(items.pop(0)) + divisor = float(items.pop(0)) + if divisor == 0.0: + return 0.0 + return dividend / divisor From cc048e4b6f053e9fdc9041fbc79ed6a82a0ecbd5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 16:57:56 +0200 Subject: [PATCH 679/785] OP-3704 - translated validate_containers.py into New publisher style AE could be already using NP. --- .../publish/help/validate_containers.xml | 24 +++++++++++++++++++ .../plugins/publish/validate_containers.py | 14 +++++++++-- 2 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 openpype/plugins/publish/help/validate_containers.xml diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml new file mode 100644 index 0000000000..e540c3c7a9 --- /dev/null +++ b/openpype/plugins/publish/help/validate_containers.xml @@ -0,0 +1,24 @@ + + + +Not up-to-date assets + +## Obsolete containers found + +Scene contains one or more obsolete loaded containers, eg. items loaded into scene by Loader. + +### How to repair? + +Use 'Scene Inventory' and update all highlighted old container to latest OR + refresh Publish and switch 'Validate Containers' toggle on 'Options' tab. + + WARNING: Skipping this validator will result in publishing (and probably rendering) old version of loaded assets. 
+ + +### __Detailed Info__ (optional) + +This validator protects you from rendering obsolete content, someone modified some referenced asset in this scene, eg. + by skipping this you would ignore changes to that asset. + + + \ No newline at end of file diff --git a/openpype/plugins/publish/validate_containers.py b/openpype/plugins/publish/validate_containers.py index b2a3ed9b79..79759450e1 100644 --- a/openpype/plugins/publish/validate_containers.py +++ b/openpype/plugins/publish/validate_containers.py @@ -1,5 +1,9 @@ import pyblish.api from openpype.pipeline.load import any_outdated_containers +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) class ShowInventory(pyblish.api.Action): @@ -14,7 +18,9 @@ class ShowInventory(pyblish.api.Action): host_tools.show_scene_inventory() -class ValidateContainers(pyblish.api.ContextPlugin): +class ValidateContainers(OptionalPyblishPluginMixin, + pyblish.api.ContextPlugin): + """Containers are must be updated to latest version on publish.""" label = "Validate Containers" @@ -24,5 +30,9 @@ class ValidateContainers(pyblish.api.ContextPlugin): actions = [ShowInventory] def process(self, context): + if not self.is_active(context.data): + return + if any_outdated_containers(): - raise ValueError("There are outdated containers in the scene.") + msg = "There are outdated containers in the scene." + raise PublishXmlValidationError(self, msg) From e43748bb3d0091f3d92f8f1ce64764dba54cf09d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 17:28:27 +0200 Subject: [PATCH 680/785] validate representation files sequence --- openpype/plugins/publish/integrate.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f7f5ca2aeb..f65ef80db7 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -541,8 +541,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if any(os.path.isabs(fname) for fname in files): raise KnownPublishError("Given file names contain full paths") - src_collection = clique.assemble(files) + src_collections, remainders = clique.assemble(files) + if len(files) < 2 or len(src_collections) != 1 or remainders: + raise KnownPublishError(( + "Files of representation does not contain proper" + " sequence files.\nCollected collections: {}" + "\nCollected remainders: {}" + ).format( + ", ".join([str(col) for col in src_collections]), + ", ".join([str(rem) for rem in remainders]) + )) + src_collection = src_collections[0] destination_indexes = list(src_collection.indexes) # Use last frame for minimum padding # - that should cover both 'udim' and 'frame' minimum padding From e0fc9d5d12974563b90c0714e8f3605672690afb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 17:31:31 +0200 Subject: [PATCH 681/785] fix typo --- openpype/plugins/publish/integrate.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f65ef80db7..070ebc290c 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -601,9 +601,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_filled = anatomy_filled[template_name]["path"] dst_filepaths.append(template_filled) if repre_context is None: - repre_context = template_filled.used_value + self.log.debug( + "Template filled: {}".format(str(template_filled)) + ) + 
repre_context = template_filled.used_values - self.log.debug("Template filled: {}".format(str(template_filled))) # Make sure context contains frame # NOTE: Frame would not be available only if template does not # contain '{frame}' in template -> Do we want support it? From 9d8a05d8a7b627b77cc434662ef39868847f31ee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 17:33:26 +0200 Subject: [PATCH 682/785] fix dst collection access --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 070ebc290c..688e252f1b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -613,7 +613,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): repre_context["frame"] = first_index_padded # Update the destination indexes and padding - dst_collection = clique.assemble(dst_filepaths) + dst_collection = clique.assemble(dst_filepaths)[0][0] dst_collection.padding = destination_padding if len(src_collection.indexes) != len(dst_collection.indexes): raise KnownPublishError(( From 7cfd9624a31b16f8bfff52684bacc7b9366cb925 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 17:39:35 +0200 Subject: [PATCH 683/785] "OP-3684 - revert - fix for new publisher" This reverts commit 80b6ef98 Made obsolete by https://github.com/pypeclub/OpenPype/pull/3611 --- .../plugins/publish/extract_local_render.py | 38 ++----------------- 1 file changed, 3 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index 67a89ba9df..7323a0b125 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -1,8 +1,7 @@ import os import sys import six -import re -import shutil + import openpype.api from openpype.hosts.aftereffects.api import get_stub @@ -23,26 +22,15 @@ class ExtractLocalRender(openpype.api.Extractor): # pull file name from Render Queue Output module render_q = stub.get_render_info() stub.render(staging_dir) - render_q_file_name = render_q.file_name if not render_q: raise ValueError("No file extension set in Render Queue") - _, ext = os.path.splitext(os.path.basename(render_q_file_name)) + _, ext = os.path.splitext(os.path.basename(render_q.file_name)) ext = ext[1:] - replace_frames_format = self._get_replace_format(render_q_file_name) - first_file_path = None files = [] + self.log.info("files::{}".format(os.listdir(staging_dir))) for file_name in os.listdir(staging_dir): - _, found_ext = os.path.splitext(file_name) - if found_ext[1:] != ext: - continue - - if replace_frames_format: - file_name = self._translate_frames(file_name, - replace_frames_format, - staging_dir) - files.append(file_name) if first_file_path is None: first_file_path = os.path.join(staging_dir, @@ -90,23 +78,3 @@ class ExtractLocalRender(openpype.api.Extractor): "stagingDir": staging_dir, "tags": ["thumbnail"] }) - - def _translate_frames(self, file_name, replace_frames_format, staging_dir): - orig_file_name = file_name - - found_frames = re.search(replace_frames_format, file_name) - if found_frames: - new_frames = found_frames.group(0).replace('_', '.') - file_name = file_name.replace(found_frames.group(0), new_frames) - shutil.move(os.path.join(staging_dir, orig_file_name), - os.path.join(staging_dir, file_name)) - - return file_name - - 
def _get_replace_format(self, file_name): - # replace delimiter for frames to one integrate is expecting (.0000.) - # returns frame format to be replaced - hashes_found = re.search(r"(_%5B[#]*%5D.)", file_name) - if hashes_found: - hashes = re.sub("[^#]", '', hashes_found.group(0)) - return "_[0-9]{{{0}}}.".format(len(hashes)) From c4fce5fea9ad37e0706c1b76500ed21585e66141 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 17:40:47 +0200 Subject: [PATCH 684/785] integrate description can use optional keys --- .../publish/integrate_ftrack_description.py | 69 +++++++++++++------ 1 file changed, 49 insertions(+), 20 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index c6a3d47f66..e7c265988e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -6,9 +6,11 @@ Requires: """ import sys +import json import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackDescription(pyblish.api.InstancePlugin): @@ -25,6 +27,10 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): description_template = "{comment}" def process(self, instance): + if not self.description_template: + self.log.info("Skipping. Description template is not set.") + return + # Check if there are any integrated AssetVersion entities asset_versions_key = "ftrackIntegratedAssetVersionsData" asset_versions_data_by_id = instance.data.get(asset_versions_key) @@ -38,39 +44,62 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): else: self.log.debug("Comment is set to `{}`".format(comment)) - session = instance.context.data["ftrackSession"] - intent = instance.context.data.get("intent") - intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent + if intent and "{intent}" in self.description_template: + value = intent.get("value") + if value: + intent = intent.get("label") or value - if not intent_label: - intent_label = intent_val or "" + if not intent and not comment: + self.log.info("Skipping. Intent and comment are empty.") + return # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) - if intent_label: - self.log.debug( - "Intent label is set to `{}`.".format(intent_label) - ) - + if intent: + self.log.debug("Intent is set to `{}`.".format(intent)) else: self.log.debug("Intent is not set.") + # If we would like to use more "optional" possibilities we would have + # come up with some expressions in templates or speicifc templates + # for all 3 possible combinations when comment and intent are + # set or not (when both are not set then description does not + # make sense). + fill_data = {} + if comment: + fill_data["comment"] = comment + if intent: + fill_data["intent"] = intent + + description = StringTemplate.format_template( + self.description_template, fill_data + ) + if not description.solved: + self.log.warning(( + "Couldn't solve template \"{}\" with data {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + if not description: + self.log.debug(( + "Skipping. Result of template is empty string." 
+ " Template \"{}\" Fill data: {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + session = instance.context.data["ftrackSession"] for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] # Backwards compatibility for older settings using # attribute 'note_with_intent_template' - comment = self.description_template.format(**{ - "intent": intent_label, - "comment": comment - }) - asset_version["comment"] = comment + + asset_version["comment"] = description try: session.commit() From d7d8d45ee5589741092a66187f42f2332296420a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 18:27:08 +0200 Subject: [PATCH 685/785] OP-3405 - representation is not a list Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/sync_server/tray/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index f05a5bd8ea..629c4cbbf1 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -923,7 +923,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, - representation.pop(), + representation, get_local_site_id(), priority=value) self.is_editing = False From 8f5360d9d55efefc7bdfa9e182b279bb046ce733 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 18:28:40 +0200 Subject: [PATCH 686/785] added ability to keep '<>' without formatting content unchanged --- openpype/lib/path_templates.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index c1282016ef..e4b18ec258 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -211,15 +211,28 @@ class StringTemplate(object): if counted_symb > -1: parts = tmp_parts.pop(counted_symb) counted_symb -= 1 + # If part contains only single string keep value + # unchanged if parts: # Remove optional start char parts.pop(0) - if counted_symb < 0: - out_parts = new_parts - else: - out_parts = tmp_parts[counted_symb] - # Store temp parts - out_parts.append(OptionalPart(parts)) + + if not parts: + value = "<>" + elif ( + len(parts) == 1 + and isinstance(parts[0], six.string_types) + ): + value = "<{}>".format(parts[0]) + else: + value = OptionalPart(parts) + + if counted_symb < 0: + out_parts = new_parts + else: + out_parts = tmp_parts[counted_symb] + # Store value + out_parts.append(value) continue if counted_symb < 0: @@ -793,6 +806,7 @@ class OptionalPart: parts(list): Parts of template. Can contain 'str', 'OptionalPart' or 'FormattingPart'. 
""" + def __init__(self, parts): self._parts = parts From 09e68b5a257916e07fcd8824fb0695b6e032a856 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 18:30:25 +0200 Subject: [PATCH 687/785] use StringTemplate in integrate ftrack note --- .../plugins/publish/integrate_ftrack_note.py | 54 ++++++++++++------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 77a7ebdfcf..ac3fa874e0 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -9,9 +9,11 @@ Requires: """ import sys +import copy import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackNote(pyblish.api.InstancePlugin): @@ -53,14 +55,10 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): intent = instance.context.data.get("intent") intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent - - if not intent_label: - intent_label = intent_val or "" + if intent: + value = intent["value"] + if value: + intent_label = intent["label"] or value # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) @@ -96,6 +94,14 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): labels.append(label) + base_format_data = { + "host_name": host_name, + "app_name": app_name, + "app_label": app_label, + "source": instance.data.get("source", '') + } + if comment: + base_format_data["comment"] = comment for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] component_items = asset_version_data["component_items"] @@ -109,23 +115,31 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): template = self.note_template if template is None: template = self.note_with_intent_template - format_data = { - "intent": intent_label, - "comment": comment, - "host_name": host_name, - "app_name": app_name, - "app_label": app_label, - "published_paths": "
".join(sorted(published_paths)), - "source": instance.data.get("source", '') - } - comment = template.format(**format_data) - if not comment: + format_data = copy.deepcopy(base_format_data) + format_data["published_paths"] = "
".join( + sorted(published_paths) + ) + if intent: + if "{intent}" in template: + format_data["intent"] = intent_label + else: + format_data["intent"] = intent + + note_text = StringTemplate.format_template(template, format_data) + if not note_text.solved: + self.log.warning(( + "Note template require more keys then can be provided." + "\nTemplate: {}\nData: {}" + ).format(template, format_data)) + continue + + if not note_text: self.log.info(( "Note for AssetVersion {} would be empty. Skipping." "\nTemplate: {}\nData: {}" ).format(asset_version["id"], template, format_data)) continue - asset_version.create_note(comment, author=user, labels=labels) + asset_version.create_note(note_text, author=user, labels=labels) try: session.commit() From 3137644299e4ade30ff8e9fe1184cf0430e3a925 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:07:29 +0200 Subject: [PATCH 688/785] :recycle: change macos installer --- setup.py | 2 +- tools/build.sh | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 8b5a545c16..eab0187983 100644 --- a/setup.py +++ b/setup.py @@ -152,7 +152,7 @@ build_exe_options = dict( ) bdist_mac_options = dict( - bundle_name="OpenPype", + bundle_name=f"OpenPype {__version__}", iconfile=mac_icon_path ) diff --git a/tools/build.sh b/tools/build.sh index 79fb748cd5..fa2c580648 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -193,15 +193,15 @@ if [ "$disable_submodule_update" == 1 ]; then if [[ "$OSTYPE" == "darwin"* ]]; then # fix code signing issue - codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python" + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" if command -v create-dmg > /dev/null 2>&1; then create-dmg \ - --volname "OpenPype Installer" \ + --volname "OpenPype $openpype_version Installer" \ --window-pos 200 120 \ --window-size 600 300 \ --app-drop-link 100 50 \ - "$openpype_root/build/OpenPype-Installer.dmg" \ - "$openpype_root/build/OpenPype.app" + "$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \ + "$openpype_root/build/OpenPype $openpype_version.app" else echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available." 
fi From 633c7a5cde89a27c69ad24108ef802c66da02c41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:26:33 +0200 Subject: [PATCH 689/785] :hammer: add more verbose info to Deadline --- .../repository/custom/plugins/GlobalJobPreLoad.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 5e923eb09a..793ee782f4 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -63,7 +63,7 @@ def inject_openpype_environment(deadlinePlugin): print(("Scanning for compatible requested " f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if dir: + if install_dir: sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -72,6 +72,7 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue + print(f" - found: {version} - {subdir}") openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) @@ -81,12 +82,15 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path( os.path.dirname(exe)) if version: + print(f" - found: {version} - {os.path.dirname(exe)}") openpype_versions.append((version, os.path.dirname(exe))) if requested_version: # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) + print(("Latest available version found is " + f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -102,6 +106,8 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort(key=lambda ver: ver[0]) + print(("Latest compatible version found is " + f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From b9703f3fda15a9999edba3ce4be1bae43f74913a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:43:48 +0200 Subject: [PATCH 690/785] :bug: fix inverted condition --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 2 +- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 793ee782f4..e0fd22e218 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -23,7 +23,7 @@ def get_openpype_version_from_path(path, build=True): if not os.path.isfile(version_file): return None # skip if the version is not build - if not build and \ + if build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or not os.path.isfile(os.path.join(path, "openpype_console.exe"))): return None diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 79101bb90c..3eba347770 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -74,7 +74,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if not os.path.isfile(version_file): return None # skip if the version is not build - if not build and \ + if build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 return None From b65a360ca6415269fcd90a0ab1385be87ad8bb0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 12:25:08 +0200 Subject: [PATCH 691/785] fix types in default settings --- openpype/settings/defaults/project_settings/maya.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index b98506f6a8..d52dd407f2 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -42,14 +42,14 @@ "multilayer_exr": true, "tiled": true, "aov_list": [], - "additional_options": {} + "additional_options": [] }, "vray_renderer": { "image_prefix": "maya///", "engine": "1", "image_format": "png", "aov_list": [], - "additional_options": {} + "additional_options": [] }, "redshift_renderer": { "image_prefix": "maya///", @@ -59,7 +59,7 @@ "multilayer_exr": true, "force_combine": true, "aov_list": [], - "additional_options": {} + "additional_options": [] } }, "create": { From a32ca255f6edd3c1c3f0b47c212a035e6b169792 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 12:25:30 +0200 Subject: [PATCH 692/785] resave settings to match formattings --- .../defaults/project_settings/maya.json | 31 +++++++++---------- .../project_settings/traypublisher.json | 8 +++-- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index d52dd407f2..ac0f161cf2 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -99,6 +99,20 @@ "enabled": true, "publish_mip_map": true }, + 
"CreateAnimation": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, + "CreatePointCache": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, "CreateMultiverseUsd": { "enabled": true, "defaults": [ @@ -117,14 +131,6 @@ "Main" ] }, - "CreateAnimation": { - "enabled": true, - "write_color_sets": false, - "defaults": [ - "Main" - ] - - }, "CreateAss": { "enabled": true, "defaults": [ @@ -163,13 +169,6 @@ "Sculpt" ] }, - "CreatePointCache": { - "enabled": true, - "write_color_sets": false, - "defaults": [ - "Main" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ @@ -977,4 +976,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 2cb7d358ed..5db2a79772 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -294,8 +294,12 @@ } }, "BatchMovieCreator": { - "default_variants": ["Main"], - "default_tasks": ["Compositing"], + "default_variants": [ + "Main" + ], + "default_tasks": [ + "Compositing" + ], "extensions": [ ".mov" ] From 03c648c8fd897ab374752eea1175f6c67b281afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 13:08:51 +0200 Subject: [PATCH 693/785] :bug: fix executable detection on platforms --- .../custom/plugins/GlobalJobPreLoad.py | 24 ++++++++++++++----- .../custom/plugins/OpenPype/OpenPype.py | 17 ++++++++++--- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index e0fd22e218..2972eeec40 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -19,14 +19,24 @@ def get_openpype_version_from_path(path, build=True): str or None: version of OpenPype if found. """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build - if build and \ - (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(f" ! path is not a build: {path}") return None + version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -64,6 +74,7 @@ def inject_openpype_environment(deadlinePlugin): f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: + print(f"Looking for OpenPype at: {install_dir}") sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -79,6 +90,7 @@ def inject_openpype_environment(deadlinePlugin): if openpype_versions: # if looking for requested compatible version, # add the implicitly specified to the list too. 
+ print(f"Looking for OpenPype at: {os.path.dirname(exe)}") version = get_openpype_version_from_path( os.path.dirname(exe)) if version: @@ -89,8 +101,8 @@ def inject_openpype_environment(deadlinePlugin): # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) - print(("Latest available version found is " - f"{openpype_versions[-1][0]}")) + print(("Latest available version found is " + f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -166,7 +178,7 @@ def inject_openpype_environment(deadlinePlugin): env["OPENPYPE_HEADLESS_MODE"] = "1" env["AVALON_TIMEOUT"] = "5000" - print(">>> Executing: {}".format(args)) + print(">>> Executing: {}".format(" ".join(args))) std_output = subprocess.check_output(args, cwd=os.path.dirname(exe), env=env) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 3eba347770..aa3ddc7088 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -13,6 +13,7 @@ from Deadline.Scripting import ( import re import os +import platform ###################################################################### @@ -70,14 +71,24 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): str or None: version of OpenPype if found. """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build - if build and \ - (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(f" ! path is not a build: {path}") return None + version = {} with open(version_file, "r") as vf: exec(vf.read(), version) From 53877ebe96114f3a38e428c502d05ce72ec4dc46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 13:25:56 +0200 Subject: [PATCH 694/785] :rotating_light: unify output messages --- .../repository/custom/plugins/GlobalJobPreLoad.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 2972eeec40..b8a31e01ff 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -70,11 +70,11 @@ def inject_openpype_environment(deadlinePlugin): # lets go over all available and find compatible build. 
requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print(("Scanning for compatible requested " + print((">>> Scanning for compatible requested " f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: - print(f"Looking for OpenPype at: {install_dir}") + print(f"--- Looking for OpenPype at: {install_dir}") sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -83,7 +83,7 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue - print(f" - found: {version} - {subdir}") + print(f" - found: {version} - {subdir}") openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) @@ -94,14 +94,14 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path( os.path.dirname(exe)) if version: - print(f" - found: {version} - {os.path.dirname(exe)}") + print(f" - found: {version} - {os.path.dirname(exe)}") openpype_versions.append((version, os.path.dirname(exe))) if requested_version: # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) - print(("Latest available version found is " + print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] @@ -118,7 +118,7 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort(key=lambda ver: ver[0]) - print(("Latest compatible version found is " + print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let # Deadline decide. 
From 097638c9e54c6fd6cd02d88d456e820c72c6a9fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:35:01 +0200 Subject: [PATCH 695/785] :recycle: natural sort versions --- .../repository/custom/plugins/GlobalJobPreLoad.py | 13 +++++++++++-- .../repository/custom/plugins/OpenPype/OpenPype.py | 12 ++++++++++-- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index b8a31e01ff..17f911a686 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -100,7 +100,12 @@ def inject_openpype_environment(deadlinePlugin): if requested_version: # sort detected versions if openpype_versions: - openpype_versions.sort(key=lambda ver: ver[0]) + # use natural sorting + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 @@ -117,7 +122,11 @@ def inject_openpype_environment(deadlinePlugin): "in Deadline or install it to configured " "directory.").format(requested_version)) # sort compatible versions nad pick the last one - compatible_versions.sort(key=lambda ver: ver[0]) + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index aa3ddc7088..d270a1b87e 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -132,7 +132,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if requested_version: # sort detected versions if openpype_versions: - openpype_versions.sort(key=lambda ver: ver[0]) + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -146,7 +150,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): "in Deadline or install it to configured " "directory.").format(requested_version)) # sort compatible versions nad pick the last one - compatible_versions.sort(key=lambda ver: ver[0]) + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From 7de8c3394a0aa3ed5dadb6fb78e4b217956509bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:38:57 +0200 Subject: [PATCH 696/785] :rotating_light: fix invalid sequence warning --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 4 ++-- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 17f911a686..ae5f2e5914 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -104,7 +104,7 @@ def inject_openpype_environment(deadlinePlugin): openpype_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) @@ -125,7 +125,7 @@ def inject_openpype_environment(deadlinePlugin): compatible_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index d270a1b87e..00292ed5a9 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -135,7 +135,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): openpype_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] @@ -153,7 +153,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): compatible_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) # create list of executables for different platform and let # Deadline decide. 
From 52eba357d6c1eeae3b2b73d13ec99140a8801a9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:46:24 +0200 Subject: [PATCH 697/785] :rotating_light: fix hound :dog: --- .../repository/custom/plugins/GlobalJobPreLoad.py | 8 ++++---- .../repository/custom/plugins/OpenPype/OpenPype.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index ae5f2e5914..172649c951 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -123,10 +123,10 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 00292ed5a9..6b0f69d98f 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -151,10 +151,10 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From bfa906eb62043decb0c55549fbc678575384c052 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:35:09 +0200 Subject: [PATCH 698/785] OP-3698 - added profile to Webpublisher settings for timeouts Currently applicable only to PS --- .../project_settings/webpublisher.json | 9 ++++++ .../schema_project_webpublisher.json | 32 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index 77168c25e6..cba472514e 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -1,4 +1,13 @@ { + "timeout_profiles": [ + { + "hosts": [ + "photoshop" + ], + "task_types": [], + "timeout": 600 + } + ], "publish": { "CollectPublishedFiles": { "task_type_to_family": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index b76a0fa844..2ef7a05b21 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -5,6 +5,38 @@ "label": "Web Publisher", "is_file": true, "children": [ + { + "type": "list", + "collapsible": true, + "use_label_wrap": true, + "key": "timeout_profiles", + "label": "Timeout profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum", + "multiselection": true + }, + { + "type": "separator" + }, + { + "type": "number", + "key": "timeout", + "label": "Timeout (sec)" + } + ] + } + }, { "type": "dict", "collapsible": true, From c05f893333aed9a3a1638a097b15d682b886bb3d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:36:16 +0200 Subject: [PATCH 699/785] OP-3698 - implemented timout or Webpublisher's PS processing --- openpype/lib/remote_publish.py | 29 +++++++++++++++++++++-------- openpype/pype_commands.py | 21 +++++++++++++++++++-- 2 files changed, 40 insertions(+), 10 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 38c6b07c5b..9409b72e39 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -1,4 +1,5 @@ import os +import sys from datetime import datetime import collections @@ -9,6 +10,8 @@ import pyblish.api from openpype.client.mongo import OpenPypeMongoConnection from openpype.lib.plugin_tools import parse_json +from openpype.lib.profiles_filtering import filter_profiles +from openpype.api import get_project_settings ERROR_STATUS = "error" IN_PROGRESS_STATUS = "in_progress" @@ -175,14 +178,8 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): ) -def fail_batch(_id, batches_in_progress, dbcon): - """Set current batch as failed as there are some stuck batches.""" - running_batches = [str(batch["_id"]) - for batch in batches_in_progress - if batch["_id"] != _id] - msg = "There are still running batches {}\n". 
\ - format("\n".join(running_batches)) - msg += "Ask admin to check them and reprocess current batch" +def fail_batch(_id, dbcon, msg): + """Set current batch as failed as there is some problem.""" dbcon.update_one( {"_id": _id}, {"$set": @@ -259,3 +256,19 @@ def get_task_data(batch_dir): "Cannot parse batch meta in {} folder".format(task_data)) return task_data + + +def get_timeout(project_name, host_name, task_type): + """Returns timeout(seconds) from Setting profile.""" + filter_data = { + "task_types": task_type, + "hosts": host_name + } + timeout_profiles = (get_project_settings(project_name)["webpublisher"] + ["timeout_profiles"]) + matching_item = filter_profiles(timeout_profiles, filter_data) + timeout = sys.maxsize + if matching_item: + timeout = matching_item["timeout"] + + return timeout diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 124eacbe39..0e217ad8a1 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -15,6 +15,7 @@ from openpype.lib.remote_publish import ( fail_batch, find_variant_key, get_task_data, + get_timeout, IN_PROGRESS_STATUS ) @@ -222,10 +223,17 @@ class PypeCommands: batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) if len(batches_in_progress) > 1: - fail_batch(_id, batches_in_progress, dbcon) + running_batches = [str(batch["_id"]) + for batch in batches_in_progress + if batch["_id"] != _id] + msg = "There are still running batches {}\n". \ + format("\n".join(running_batches)) + msg += "Ask admin to check them and reprocess current batch" + fail_batch(_id, dbcon, msg) print("Another batch running, probably stuck, ask admin for help") - asset, task_name, _ = get_batch_asset_task_info(task_data["context"]) + asset, task_name, task_type = get_batch_asset_task_info( + task_data["context"]) application_manager = ApplicationManager() found_variant_key = find_variant_key(application_manager, host_name) @@ -269,8 +277,17 @@ class PypeCommands: launched_app = application_manager.launch(app_name, **data) + timeout = get_timeout(project, host_name, task_type) + + time_start = time.time() while launched_app.poll() is None: time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + msg = "Timeout reached" + fail_batch(_id, dbcon, msg) + raise ValueError("Timeout reached") + @staticmethod def remotepublish(project, batch_path, user_email, targets=None): From e48eea04e6785a5ca96627bd32d60d5b2f3dbf90 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:38:10 +0200 Subject: [PATCH 700/785] OP-3698 - refactor - renamed variables --- openpype/pype_commands.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 0e217ad8a1..c18ca218c6 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -171,7 +171,7 @@ class PypeCommands: log.info("Publish finished.") @staticmethod - def remotepublishfromapp(project, batch_path, host_name, + def remotepublishfromapp(project_name, batch_path, host_name, user_email, targets=None): """Opens installed variant of 'host' and run remote publish there. @@ -190,8 +190,8 @@ class PypeCommands: Runs publish process as user would, in automatic fashion. Args: - project (str): project to publish (only single context is expected - per call of remotepublish + project_name (str): project to publish (only single context is + expected per call of remotepublish batch_path (str): Path batch folder. 
Contains subfolders with resources (workfile, another subfolder 'renders' etc.) host_name (str): 'photoshop' @@ -232,7 +232,7 @@ class PypeCommands: fail_batch(_id, dbcon, msg) print("Another batch running, probably stuck, ask admin for help") - asset, task_name, task_type = get_batch_asset_task_info( + asset_name, task_name, task_type = get_batch_asset_task_info( task_data["context"]) application_manager = ApplicationManager() @@ -241,8 +241,8 @@ class PypeCommands: # must have for proper launch of app env = get_app_environments_for_context( - project, - asset, + project_name, + asset_name, task_name, app_name ) @@ -270,14 +270,14 @@ class PypeCommands: data = { "last_workfile_path": workfile_path, "start_last_workfile": True, - "project_name": project, - "asset_name": asset, + "project_name": project_name, + "asset_name": asset_name, "task_name": task_name } launched_app = application_manager.launch(app_name, **data) - timeout = get_timeout(project, host_name, task_type) + timeout = get_timeout(project_name, host_name, task_type) time_start = time.time() while launched_app.poll() is None: From f6899fad62aa430eb1d36e18f2e170d8aba9e25e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:40:47 +0200 Subject: [PATCH 701/785] OP-3698 - updated docstring Removed raise, already in function Added default to 1 hour --- openpype/lib/remote_publish.py | 9 ++++++--- openpype/pype_commands.py | 2 -- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 9409b72e39..b4b05c053b 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -1,5 +1,4 @@ import os -import sys from datetime import datetime import collections @@ -179,7 +178,11 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): def fail_batch(_id, dbcon, msg): - """Set current batch as failed as there is some problem.""" + """Set current batch as failed as there is some problem. 
+ + Raises: + ValueError + """ dbcon.update_one( {"_id": _id}, {"$set": @@ -267,7 +270,7 @@ def get_timeout(project_name, host_name, task_type): timeout_profiles = (get_project_settings(project_name)["webpublisher"] ["timeout_profiles"]) matching_item = filter_profiles(timeout_profiles, filter_data) - timeout = sys.maxsize + timeout = 3600 if matching_item: timeout = matching_item["timeout"] diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index c18ca218c6..a447aa916b 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -286,8 +286,6 @@ class PypeCommands: launched_app.terminate() msg = "Timeout reached" fail_batch(_id, dbcon, msg) - raise ValueError("Timeout reached") - @staticmethod def remotepublish(project, batch_path, user_email, targets=None): From 7f6e6649cd217997bea383bdbf1a351362717bec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 17:04:53 +0200 Subject: [PATCH 702/785] let ffmpeg handle scales by forcing original aspect ratio --- openpype/plugins/publish/extract_review.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 533a87acb4..fe5d34b1a1 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1390,9 +1390,11 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("height_half_pad: `{}`".format(height_half_pad)) filters.extend([ - "scale={}x{}:flags=lanczos".format( - width_scale, height_scale - ), + ( + "scale={}x{}" + ":flags=lanczos" + ":force_original_aspect_ratio=decrease" + ).format(output_width, output_height), "pad={}:{}:{}:{}:{}".format( output_width, output_height, width_half_pad, height_half_pad, From a0fed43787fab4b945ea850235dde2270d0203b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 17:07:54 +0200 Subject: [PATCH 703/785] don't even calculate the padded part --- openpype/plugins/publish/extract_review.py | 23 +--------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index fe5d34b1a1..7442d3aacb 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1369,35 +1369,14 @@ class ExtractReview(pyblish.api.InstancePlugin): or input_width != output_width or pixel_aspect != 1 ): - if input_res_ratio < output_res_ratio: - self.log.debug( - "Input's resolution ratio is lower then output's" - ) - width_scale = int(input_width * scale_factor_by_height) - width_half_pad = int((output_width - width_scale) / 2) - height_scale = output_height - height_half_pad = 0 - else: - self.log.debug("Input is heigher then output") - width_scale = output_width - width_half_pad = 0 - height_scale = int(input_height * scale_factor_by_width) - height_half_pad = int((output_height - height_scale) / 2) - - self.log.debug("width_scale: `{}`".format(width_scale)) - self.log.debug("width_half_pad: `{}`".format(width_half_pad)) - self.log.debug("height_scale: `{}`".format(height_scale)) - self.log.debug("height_half_pad: `{}`".format(height_half_pad)) - filters.extend([ ( "scale={}x{}" ":flags=lanczos" ":force_original_aspect_ratio=decrease" ).format(output_width, output_height), - "pad={}:{}:{}:{}:{}".format( + "pad={}:{}:(ow-iw)/2:(oh-ih)/2:{}".format( output_width, output_height, - width_half_pad, height_half_pad, overscan_color_value ), "setsar=1" From 
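With `force_original_aspect_ratio=decrease` the scale filter keeps the input aspect ratio inside the target box and ffmpeg's own pad expressions `(ow-iw)/2` / `(oh-ih)/2` center the result, so no pad sizes need to be computed in Python. A small sketch of building that filter chain; the output size and padding color are illustrative values, not settings read from the plugin.

def letterbox_filters(output_width, output_height, color="black"):
    """Build ffmpeg filters that fit any input into the target box.

    The scale keeps the source aspect ratio and ffmpeg's pad expressions
    center the image, so no per-input pad math is needed on the Python side.
    """
    return [
        (
            "scale={}x{}"
            ":flags=lanczos"
            ":force_original_aspect_ratio=decrease"
        ).format(output_width, output_height),
        "pad={}:{}:(ow-iw)/2:(oh-ih)/2:{}".format(
            output_width, output_height, color
        ),
        "setsar=1",
    ]


if __name__ == "__main__":
    # e.g. "-vf scale=1920x1080:...,pad=1920:1080:(ow-iw)/2:(oh-ih)/2:black,setsar=1"
    print("-vf " + ",".join(letterbox_filters(1920, 1080)))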
b7c377e42288f0c7cdab55dd5d0ce6ac6e46499d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 18:07:01 +0200 Subject: [PATCH 704/785] handle create, update and delete operations properly --- .../event_push_frame_values_to_task.py | 57 ++++++++++++++++--- 1 file changed, 48 insertions(+), 9 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 0914933de4..0895967fb1 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -380,33 +380,49 @@ class PushFrameValuesToTaskEvent(BaseEvent): uncommited_changes = False for idx, item in enumerate(changes): new_value = item["new_value"] + old_value = item["old_value"] attr_id = item["attr_id"] entity_id = item["entity_id"] attr_key = item["attr_key"] - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = attr_id - entity_key["entity_id"] = entity_id + entity_key = collections.OrderedDict(( + ("configuration_id", attr_id), + ("entity_id", entity_id) + )) self._cached_changes.append({ "attr_key": attr_key, "entity_id": entity_id, "value": new_value, "time": datetime.datetime.now() }) + old_value_is_set = ( + old_value is not ftrack_api.symbol.NOT_SET + and old_value is not None + ) if new_value is None: + if not old_value_is_set: + continue op = ftrack_api.operation.DeleteEntityOperation( "CustomAttributeValue", entity_key ) - else: + + elif old_value_is_set: op = ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", + "CustomAttributeValue", entity_key, "value", - ftrack_api.symbol.NOT_SET, + old_value, new_value ) + else: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": new_value} + ) + session.recorded_operations.push(op) self.log.info(( "Changing Custom Attribute \"{}\" to value" @@ -550,7 +566,11 @@ class PushFrameValuesToTaskEvent(BaseEvent): attr_ids = set(attr_id_to_key.keys()) current_values_by_id = self.get_current_values( - session, attr_ids, entity_ids, task_entity_ids, hier_attrs + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ) changes = [] @@ -567,7 +587,12 @@ class PushFrameValuesToTaskEvent(BaseEvent): # Convert new value from string new_value = values.get(attr_key) - if new_value is not None and old_value is not None: + new_value_is_valid = ( + old_value is not ftrack_api.symbol.NOT_SET + and new_value is not None + ) + + if new_value is not None and new_value_is_valid: try: new_value = type(old_value)(new_value) except Exception: @@ -581,6 +606,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): changes.append({ "new_value": new_value, "attr_id": attr_id, + "old_value": old_value, "entity_id": entity_id, "attr_key": attr_key }) @@ -645,15 +671,28 @@ class PushFrameValuesToTaskEvent(BaseEvent): return interesting_data, changed_keys_by_object_id def get_current_values( - self, session, attr_ids, entity_ids, task_entity_ids, hier_attrs + self, + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ): current_values_by_id = {} if not attr_ids or not entity_ids: return current_values_by_id + for entity_id in entity_ids: + current_values_by_id[entity_id] = {} + for attr_id in attr_ids: + current_values_by_id[entity_id][attr_id] = ( + ftrack_api.symbol.NOT_SET + ) + values = query_custom_attributes( session, attr_ids, entity_ids, True ) 
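The event handler now picks one of three operations per changed attribute: create when nothing was stored yet, update when an old value exists, delete when an existing value is cleared, and a no-op when both sides are unset. A simplified, ftrack-free sketch of that decision; the `NOT_SET` sentinel here merely stands in for `ftrack_api.symbol.NOT_SET`.

NOT_SET = object()  # stand-in for ftrack_api.symbol.NOT_SET


def decide_operation(old_value, new_value):
    """Return which CustomAttributeValue operation a change needs.

    Values that were never stored must be created, existing values are
    updated, and clearing an existing value deletes the stored row.
    """
    old_value_is_set = old_value is not NOT_SET and old_value is not None

    if new_value is None:
        return "delete" if old_value_is_set else None  # nothing stored -> no-op
    if old_value_is_set:
        return "update"
    return "create"


if __name__ == "__main__":
    print(decide_operation(NOT_SET, 1001))   # create
    print(decide_operation(1001, 1010))      # update
    print(decide_operation(1001, None))      # delete
    print(decide_operation(NOT_SET, None))   # None (ignored)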
+ for item in values: entity_id = item["entity_id"] attr_id = item["configuration_id"] From 7e2f7efa64b7b7869f97a86f065532748582770e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 18:07:07 +0200 Subject: [PATCH 705/785] handle new added entities --- .../event_push_frame_values_to_task.py | 181 ++++++++++++++++-- 1 file changed, 166 insertions(+), 15 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 0895967fb1..dc76920a57 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -1,10 +1,11 @@ import collections import datetime +import copy import ftrack_api from openpype_modules.ftrack.lib import ( BaseEvent, - query_custom_attributes + query_custom_attributes, ) @@ -124,10 +125,15 @@ class PushFrameValuesToTaskEvent(BaseEvent): # Separate value changes and task parent changes _entities_info = [] + added_entities = [] + added_entity_ids = set() task_parent_changes = [] for entity_info in entities_info: if entity_info["entity_type"].lower() == "task": task_parent_changes.append(entity_info) + elif entity_info.get("action") == "add": + added_entities.append(entity_info) + added_entity_ids.add(entity_info["entityId"]) else: _entities_info.append(entity_info) entities_info = _entities_info @@ -136,6 +142,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data, changed_keys_by_object_id = self.filter_changes( session, event, entities_info, interest_attributes ) + self.interesting_data_for_added( + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ) if not interesting_data and not task_parent_changes: return @@ -151,9 +164,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): # - it is a complex way how to find out if interesting_data: self.process_attribute_changes( - session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ) if task_parent_changes: @@ -163,8 +180,12 @@ class PushFrameValuesToTaskEvent(BaseEvent): ) def process_task_parent_change( - self, session, object_types_by_name, task_parent_changes, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + task_parent_changes, + interest_entity_types, + interest_attributes ): """Push custom attribute values if task parent has changed. @@ -176,6 +197,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): real hierarchical value and non hierarchical custom attribute value should be set to hierarchical value. """ + # Store task ids which were created or moved under parent with entity # type defined in settings (interest_entity_types). 
task_ids = set() @@ -448,9 +470,14 @@ class PushFrameValuesToTaskEvent(BaseEvent): self.log.warning("Changing of values failed.", exc_info=True) def process_attribute_changes( - self, session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ): # Prepare task object id task_object_id = object_types_by_name["task"]["id"] @@ -538,15 +565,26 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id_by_task_id[task_id] = task_entity["parent_id"] self.finalize_attribute_changes( - session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ) def finalize_attribute_changes( - self, session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + self, + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ): attr_id_to_key = {} for attr_confs in attrs_by_obj_id.values(): @@ -580,7 +618,11 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id = entity_id values = interesting_data[parent_id] + added_entity = entity_id in added_entity_ids for attr_id, old_value in current_values.items(): + if added_entity and attr_id in hier_attrs: + continue + attr_key = attr_id_to_key.get(attr_id) if not attr_key: continue @@ -591,6 +633,8 @@ class PushFrameValuesToTaskEvent(BaseEvent): old_value is not ftrack_api.symbol.NOT_SET and new_value is not None ) + if added_entity and not new_value_is_valid: + continue if new_value is not None and new_value_is_valid: try: @@ -625,6 +669,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data = {} changed_keys_by_object_id = {} + for entity_info in entities_info: # Care only about changes if specific keys entity_changes = {} @@ -670,6 +715,100 @@ class PushFrameValuesToTaskEvent(BaseEvent): return interesting_data, changed_keys_by_object_id + def interesting_data_for_added( + self, + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ): + if not added_entities or not interest_attributes: + return + + object_type_ids = set() + entity_ids = set() + all_entity_ids = set() + object_id_by_entity_id = {} + project_id = None + entity_ids_by_parent_id = collections.defaultdict(set) + for entity_info in added_entities: + object_id = entity_info["objectTypeId"] + entity_id = entity_info["entityId"] + object_type_ids.add(object_id) + entity_ids.add(entity_id) + object_id_by_entity_id[entity_id] = object_id + + for item in entity_info["parents"]: + entity_id = item["entityId"] + all_entity_ids.add(entity_id) + parent_id = item["parentId"] + if not parent_id: + project_id = entity_id + else: + entity_ids_by_parent_id[parent_id].add(entity_id) + + hier_attrs = self.get_hierarchical_configurations( + session, interest_attributes + ) + if not hier_attrs: + return + + hier_attrs_key_by_id = { + attr_conf["id"]: attr_conf["key"] + for attr_conf in hier_attrs + } + default_values_by_key = { + attr_conf["key"]: attr_conf["default"] + for attr_conf in hier_attrs + } + + values = query_custom_attributes( + session, list(hier_attrs_key_by_id.keys()), all_entity_ids, True + ) + 
values_per_entity_id = {} + for entity_id in all_entity_ids: + values_per_entity_id[entity_id] = {} + for attr_name in interest_attributes: + values_per_entity_id[entity_id][attr_name] = None + + for item in values: + entity_id = item["entity_id"] + key = hier_attrs_key_by_id[item["configuration_id"]] + values_per_entity_id[entity_id][key] = item["value"] + + fill_queue = collections.deque() + fill_queue.append((project_id, default_values_by_key)) + while fill_queue: + item = fill_queue.popleft() + entity_id, values_by_key = item + entity_values = values_per_entity_id[entity_id] + new_values_by_key = copy.deepcopy(values_by_key) + for key, value in values_by_key.items(): + current_value = entity_values[key] + if current_value is None: + entity_values[key] = value + else: + new_values_by_key[key] = current_value + + for child_id in entity_ids_by_parent_id[entity_id]: + fill_queue.append((child_id, new_values_by_key)) + + for entity_id in entity_ids: + entity_changes = {} + for key, value in values_per_entity_id[entity_id].items(): + if value is not None: + entity_changes[key] = value + + if not entity_changes: + continue + + interesting_data[entity_id] = entity_changes + object_id = object_id_by_entity_id[entity_id] + if object_id not in changed_keys_by_object_id: + changed_keys_by_object_id[object_id] = set() + changed_keys_by_object_id[object_id] |= set(entity_changes.keys()) + def get_current_values( self, session, @@ -738,6 +877,18 @@ class PushFrameValuesToTaskEvent(BaseEvent): output[obj_id][attr["key"]] = attr["id"] return output, hiearchical + def get_hierarchical_configurations(self, session, interest_attributes): + hier_attr_query = ( + "select id, key, object_type_id, is_hierarchical, default" + " from CustomAttributeConfiguration" + " where key in ({}) and is_hierarchical is true" + ) + if not interest_attributes: + return [] + return list(session.query(hier_attr_query.format( + self.join_query_keys(interest_attributes), + )).all()) + def register(session): PushFrameValuesToTaskEvent(session).register() From 34dff12fb35b898f2c06c08b97f59a95c33063b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 19:13:48 +0200 Subject: [PATCH 706/785] :bug: fix build directory on darwin --- tools/build_dependencies.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py index d3566dd289..d186ead881 100644 --- a/tools/build_dependencies.py +++ b/tools/build_dependencies.py @@ -29,6 +29,7 @@ import shutil import blessed import enlighten import time +import re term = blessed.Terminal() @@ -52,7 +53,7 @@ def _print(msg: str, type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") def count_folders(path: Path) -> int: @@ -95,16 +96,22 @@ assert site_pkg, "No venv site-packages are found." 
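For newly added entities the handler walks the event's parent hierarchy from the project down, carrying the hierarchical attributes' default values and letting any explicit value on an entity override what its children inherit. A compact sketch of that breadth-first fill over a parent-to-children map; the attribute names and mini hierarchy in the example are hypothetical.

import collections
import copy


def fill_inherited_values(project_id, children_by_parent_id,
                          values_per_entity_id, default_values):
    """Propagate hierarchical values from the project down to all entities.

    ``values_per_entity_id`` holds each entity's own values (None when the
    entity has no explicit value); after the walk every None is replaced by
    the closest ancestor value, falling back to the attribute defaults.
    """
    fill_queue = collections.deque()
    fill_queue.append((project_id, default_values))
    while fill_queue:
        entity_id, inherited = fill_queue.popleft()
        entity_values = values_per_entity_id[entity_id]
        passed_down = copy.deepcopy(inherited)
        for key, value in inherited.items():
            if entity_values[key] is None:
                entity_values[key] = value             # inherit from parent
            else:
                passed_down[key] = entity_values[key]  # own value wins below
        for child_id in children_by_parent_id.get(entity_id, ()):
            fill_queue.append((child_id, passed_down))


if __name__ == "__main__":
    # Hypothetical mini hierarchy: project -> sq01 -> sh010
    children = {"project": ["sq01"], "sq01": ["sh010"]}
    values = {
        "project": {"fstart": None, "fend": None},
        "sq01": {"fstart": 1001, "fend": None},
        "sh010": {"fstart": None, "fend": None},
    }
    fill_inherited_values("project", children, values,
                          {"fstart": 1, "fend": 100})
    print(values["sh010"])  # {'fstart': 1001, 'fend': 100}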
_print(f"Working with: {site_pkg}", 2) openpype_root = Path(os.path.dirname(__file__)).parent +version = {} +with open(openpype_root / "openpype" / "version.py") as fp: + exec(fp.read(), version) + +version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) +openpype_version = version_match[1] # create full path if platform.system().lower() == "darwin": build_dir = openpype_root.joinpath( "build", - "OpenPype.app", + f"OpenPype {openpype_version}.app", "Contents", "MacOS") else: - build_subdir = "exe.{}-{}".format(get_platform(), sys.version[0:3]) + build_subdir = f"exe.{get_platform()}-{sys.version[:3]}" build_dir = openpype_root / "build" / build_subdir _print(f"Using build at {build_dir}", 2) From 08a9cb207385a0906cc56d063c19de3aa88eb51d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:07 +0200 Subject: [PATCH 707/785] fix typo --- openpype/lib/plugin_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index c94d1251fc..060db94ae0 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -57,7 +57,7 @@ def deprecated(new_destination): stacklevel=4 ) return decorated_func(*args, **kwargs) - return wrapper- + return wrapper if func is None: return _decorator From 6d2a869b2ebdb9a46545a1e650fe8c009f93fed3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:20 +0200 Subject: [PATCH 708/785] discover loader plugins can expect project name --- openpype/pipeline/load/plugins.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index 233aace035..7438b3230f 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -2,6 +2,7 @@ import os import logging from openpype.settings import get_system_settings, get_project_settings +from openpype.pipeline import legacy_io from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -151,9 +152,10 @@ class SubsetLoaderPlugin(LoaderPlugin): pass -def discover_loader_plugins(): +def discover_loader_plugins(project_name=None): plugins = discover(LoaderPlugin) - project_name = os.environ.get("AVALON_PROJECT") + if not project_name: + project_name = legacy_io.active_project() system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: From 0b24237bfe178270e062e3828e804edecfe6eb23 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:54 +0200 Subject: [PATCH 709/785] loader pass project name to discover loader plugins --- openpype/tools/loader/widgets.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 13e18b3757..48c038418a 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -434,7 +434,8 @@ class SubsetWidget(QtWidgets.QWidget): # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. 
- available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) if self.tool_name: available_loaders = lib.remove_tool_name_from_loaders( available_loaders, self.tool_name @@ -1330,7 +1331,8 @@ class RepresentationWidget(QtWidgets.QWidget): selected_side = self._get_selected_side(point_index, rows) # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. - available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) filtered_loaders = [] for loader in available_loaders: From cbfa9015b1f7a5d134a6ea436db587d8251fc324 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:45:35 +0200 Subject: [PATCH 710/785] catch failed applied settings --- openpype/pipeline/create/creator_plugins.py | 14 +++++++++++++- openpype/pipeline/load/plugins.py | 13 ++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 4a1630d8ef..9a5d559774 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -437,12 +437,24 @@ def discover_creator_plugins(): def discover_legacy_creator_plugins(): + from openpype.lib import Logger + + log = Logger.get_logger("CreatorDiscover") + plugins = discover(LegacyCreator) project_name = os.environ.get("AVALON_PROJECT") system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: - plugin.apply_settings(project_settings, system_settings) + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index 7438b3230f..8cba8d8217 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -153,13 +153,24 @@ class SubsetLoaderPlugin(LoaderPlugin): def discover_loader_plugins(project_name=None): + from openpype.lib import Logger + + log = Logger.get_logger("LoaderDiscover") plugins = discover(LoaderPlugin) if not project_name: project_name = legacy_io.active_project() system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: - plugin.apply_settings(project_settings, system_settings) + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins From e014deb411ebc4daaf031df28927b136fedaed56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 12:20:22 +0200 Subject: [PATCH 711/785] small variable name changes --- openpype/client/operations.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index dfb1d8c4dd..69d1eb2bb6 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -55,7 +55,7 @@ def new_project_document( "_id": _create_or_convert_to_mongo_id(entity_id), "name": project_name, "type": CURRENT_PROJECT_SCHEMA, - "data": data, + "entity_data": data, "config": config } @@ -290,6 +290,10 @@ class AbstractOperation(object): 
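Both discover functions now wrap `plugin.apply_settings()` so one plugin with broken settings logs a warning instead of aborting discovery for the rest. A reduced sketch of the pattern; the dummy plugin classes and the standard `logging` module stand in for real loader plugins and OpenPype's `Logger`.

import logging

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger("PluginDiscover")


class GoodLoader:
    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        cls.enabled = project_settings.get("enabled", True)


class BrokenLoader:
    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        raise KeyError("missing settings key")  # simulate bad settings


def apply_settings_to_plugins(plugins, project_settings, system_settings):
    """Apply settings to each plugin, logging failures instead of raising."""
    for plugin in plugins:
        try:
            plugin.apply_settings(project_settings, system_settings)
        except Exception:
            log.warning(
                "Failed to apply settings to plugin %s", plugin.__name__,
                exc_info=True
            )
    return plugins


if __name__ == "__main__":
    apply_settings_to_plugins([GoodLoader, BrokenLoader], {"enabled": True}, {})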
def to_data(self): """Convert opration to data that can be converted to json or others. + Warning: + Current state returns ObjectId objects which cannot be parsed by + json. + Returns: Dict[str, Any]: Description of operation. """ @@ -412,16 +416,16 @@ class UpdateOperation(AbstractOperation): ) def to_data(self): - fields = {} + changes = {} for key, value in self._update_data.items(): if value is REMOVED_VALUE: value = None - fields[key] = value + changes[key] = value output = super(UpdateOperation, self).to_data() output.update({ - "entity_id": str(self.entity_id), - "fields": fields + "entity_id": self.entity_id, + "changes": changes }) return output From fa7b7d67f94b7f8dca87088034204f3dc6f1a03f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 16:29:13 +0200 Subject: [PATCH 712/785] :bug: fix aov separator in redshift --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index c145f92f91..295791576d 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -963,7 +963,7 @@ class RenderProductsRedshift(ARenderProducts): """ prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.aov_separator) + prefix = "{}{}".format(prefix, self.layer_data["aov_separator"]) return prefix def get_render_products(self): From 10ff3562739d260cf0ad13817c5ee2fd4a3a7636 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 16:44:30 +0200 Subject: [PATCH 713/785] :recycle: refactor the fix --- openpype/hosts/maya/api/lib_renderproducts.py | 65 ++++++++++++------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 295791576d..1e883ea43f 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -309,6 +309,42 @@ class ARenderProducts: return lib.get_attr_in_layer(plug, layer=self.layer) + @staticmethod + def extract_separator(file_prefix): + """Extract AOV separator character from the prefix. + + Default behavior extracts the part between + last occurrences of and + + Todo: + This code also triggers for V-Ray which overrides it explicitly + so this code will invalidly debug log it couldn't extract the + AOV separator even though it does set it in RenderProductsVray. + + Args: + file_prefix (str): File prefix with tokens. + + Returns: + str or None: prefix character if it can be extracted. 
+ """ + layer_tokens = ["", ""] + aov_tokens = ["", ""] + + def match_last(tokens, text): + """regex match the last occurence from a list of tokens""" + pattern = "(?:.*)({})".format("|".join(tokens)) + return re.search(pattern, text, re.IGNORECASE) + + layer_match = match_last(layer_tokens, file_prefix) + aov_match = match_last(aov_tokens, file_prefix) + separator = None + if layer_match and aov_match: + matches = sorted((layer_match, aov_match), + key=lambda match: match.end(1)) + separator = file_prefix[matches[0].end(1):matches[1].start(1)] + return separator + + def _get_layer_data(self): # type: () -> LayerMetadata # ______________________________________________ @@ -317,7 +353,7 @@ class ARenderProducts: # ____________________/ _, scene_basename = os.path.split(cmds.file(q=True, loc=True)) scene_name, _ = os.path.splitext(scene_basename) - + kwargs = {} file_prefix = self.get_renderer_prefix() # If the Render Layer belongs to a Render Setup layer then the @@ -332,26 +368,8 @@ class ARenderProducts: # defaultRenderLayer renders as masterLayer layer_name = "masterLayer" - # AOV separator - default behavior extracts the part between - # last occurences of and - # todo: This code also triggers for V-Ray which overrides it explicitly - # so this code will invalidly debug log it couldn't extract the - # aov separator even though it does set it in RenderProductsVray - layer_tokens = ["", ""] - aov_tokens = ["", ""] - - def match_last(tokens, text): - """regex match the last occurence from a list of tokens""" - pattern = "(?:.*)({})".format("|".join(tokens)) - return re.search(pattern, text, re.IGNORECASE) - - layer_match = match_last(layer_tokens, file_prefix) - aov_match = match_last(aov_tokens, file_prefix) - kwargs = {} - if layer_match and aov_match: - matches = sorted((layer_match, aov_match), - key=lambda match: match.end(1)) - separator = file_prefix[matches[0].end(1):matches[1].start(1)] + separator = self.extract_separator(file_prefix) + if separator: kwargs["aov_separator"] = separator else: log.debug("Couldn't extract aov separator from " @@ -962,8 +980,9 @@ class RenderProductsRedshift(ARenderProducts): :func:`ARenderProducts.get_renderer_prefix()` """ - prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.layer_data["aov_separator"]) + file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix() + separator = self.extract_separator(file_prefix) + prefix = "{}{}".format(file_prefix, separator or "_") return prefix def get_render_products(self): From 401a04c767eff76a8981a1371c36f2ec36fc9d9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 17:14:10 +0200 Subject: [PATCH 714/785] :bug: fix missing variable and handle unset Settings value --- openpype/hosts/maya/plugins/publish/collect_render.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index d1e87c95bb..e6fc8a01e5 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -205,7 +205,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): .get('maya')\ .get('create')\ .get('CreateRender')\ - .get('default_render_image_folder') + .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. 
publish_meta_path = None @@ -318,7 +318,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "useReferencedAovs": render_instance.data.get( "useReferencedAovs") or render_instance.data.get( "vrayUseReferencedAovs") or False, - "aovSeparator": aov_separator + "aovSeparator": layer_render_products.layer_data.aov_separator # noqa: E501 } # Collect Deadline url if Deadline module is enabled From 5bd2d1d3c865510e7c4c8528f579ba6ca0d90f18 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 6 Aug 2022 03:45:37 +0000 Subject: [PATCH 715/785] [Automated] Bump version --- CHANGELOG.md | 36 +++++++++++++++--------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c9671c8b8..15a120ec2a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,35 +1,45 @@ # Changelog -## [3.12.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) -**🆕 New features** - -- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) - **🚀 Enhancements** +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) +- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) - Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) - Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) - Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. 
[\#3590](https://github.com/pypeclub/OpenPype/pull/3590) - Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) - General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) - Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) @@ -51,7 +61,6 @@ - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) - Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) -- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) **🐛 Bug fixes** @@ -60,7 +69,6 @@ - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) -- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) - Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) @@ -95,20 +103,6 @@ **🚀 Enhancements** - TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) -- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) -- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) -- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) -- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) - -**🐛 Bug fixes** - -- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) -- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) -- 
General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) -- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) -- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) -- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) -- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) diff --git a/openpype/version.py b/openpype/version.py index 636dff5930..3f1056249a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.3-nightly.2" +__version__ = "3.12.3-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index 9ab2fd4513..66aca5e5e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.3-nightly.2" # OpenPype +version = "3.12.3-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From ed13f96a1222dbede0b8ea62268e2a8350d84ee6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 19:44:43 +0800 Subject: [PATCH 716/785] fix the bug of failing to extract look when UDIMs format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 0b26e922d5..bbd21cfa42 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,9 +429,14 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - if files_metadata[source]["color_space"] == "Raw": + try: + if files_metadata[source]["color_space"] == "Raw": # set color space to raw if we linearized it - color_space = "Raw" + color_space = "Raw" + except KeyError: + #set color space to Raw if the attribute of the color space is raw. + if cmds.getAttr(color_space_attr) == "Raw": + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From 13bc6cab8efca3d9038e76a7a6d7fb5e11663f57 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 20:10:04 +0800 Subject: [PATCH 717/785] fix the bug of failing to extract the look with the UDIMs format in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index bbd21cfa42..32724c64c1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -430,11 +430,11 @@ class ExtractLook(openpype.api.Extractor): color_space = "Raw" else: try: - if files_metadata[source]["color_space"] == "Raw": + if files_metadata[source]["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" except KeyError: - #set color space to Raw if the attribute of the color space is raw. + # set color space to Raw if the attribute of the color space is raw. 
if cmds.getAttr(color_space_attr) == "Raw": color_space = "Raw" # Remap file node filename to destination From 1a7164fa90be5e394ce994a07c0355937a4987c7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 20:11:07 +0800 Subject: [PATCH 718/785] fix the bug of failing to extract the look with the UDIMs format in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 32724c64c1..c6737c7215 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -434,7 +434,7 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" except KeyError: - # set color space to Raw if the attribute of the color space is raw. + # set color space to Raw if its attribute is raw. if cmds.getAttr(color_space_attr) == "Raw": color_space = "Raw" # Remap file node filename to destination From 38c35a87dea322e8fb81179cb40abd0549a905b7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 22:15:50 +0800 Subject: [PATCH 719/785] fix AiImage colorspace and UDIMs errored out while extracting the look --- openpype/hosts/maya/plugins/publish/extract_look.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index c6737c7215..9974f97f1b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,14 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - try: - if files_metadata[source]["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" - except KeyError: - # set color space to Raw if its attribute is raw. 
- if cmds.getAttr(color_space_attr) == "Raw": - color_space = "Raw" + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From 13302ca23e804ab476e1822657b91c8369bd9cb9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:29:02 +0200 Subject: [PATCH 720/785] mix audio using side file for filters --- .../publish/extract_otio_audio_tracks.py | 98 ++++++++++++------- 1 file changed, 62 insertions(+), 36 deletions(-) diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py index 00c1748cdc..ed30a2f0f5 100644 --- a/openpype/plugins/publish/extract_otio_audio_tracks.py +++ b/openpype/plugins/publish/extract_otio_audio_tracks.py @@ -57,15 +57,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): audio_inputs.insert(0, empty) # create cmd - cmd = path_to_subprocess_arg(self.ffmpeg_path) + " " - cmd += self.create_cmd(audio_inputs) - cmd += path_to_subprocess_arg(audio_temp_fpath) - - # run subprocess - self.log.debug("Executing: {}".format(cmd)) - openpype.api.run_subprocess( - cmd, shell=True, logger=self.log - ) + self.mix_audio(audio_inputs, audio_temp_fpath) # remove empty os.remove(empty["mediaPath"]) @@ -245,46 +237,80 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): "durationSec": max_duration_sec } - def create_cmd(self, inputs): + def mix_audio(self, audio_inputs, audio_temp_fpath): """Creating multiple input cmd string Args: - inputs (list): list of input dicts. Order mater. + audio_inputs (list): list of input dicts. Order mater. Returns: str: the command body - """ + + longest_input = 0 + for audio_input in audio_inputs: + audio_len = audio_input["durationSec"] + if audio_len > longest_input: + longest_input = audio_len + # create cmd segments - _inputs = "" - _filters = "-filter_complex \"" - _channels = "" - for index, input in enumerate(inputs): - input_format = input.copy() - input_format.update({"i": index}) - input_format["mediaPath"] = path_to_subprocess_arg( - input_format["mediaPath"] + input_args = [] + filters = [] + tag_names = [] + for index, audio_input in enumerate(audio_inputs): + input_args.extend([ + "-ss", str(audio_input["startSec"]), + "-t", str(audio_input["durationSec"]), + "-i", audio_input["mediaPath"] + ]) + + # Output tag of a filtered audio input + tag_name = "[r{}]".format(index) + tag_names.append(tag_name) + # Delay in audio by delay in item + filters.append("[{}]adelay={}:all=1{}".format( + index, audio_input["delayMilSec"], tag_name + )) + + # Mixing filter + # - dropout transition (when audio will get loader) is set to be + # higher then any input audio item + # - volume is set to number of inputs - each mix adds 1/n volume + # where n is input inder (to get more info read ffmpeg docs and + # send a giftcard to contributor) + filters.append( + ( + "{}amix=inputs={}:duration=first:" + "dropout_transition={},volume={}[a]" + ).format( + "".join(tag_names), + len(audio_inputs), + (longest_input * 1000) + 1000, + len(audio_inputs), ) + ) - _inputs += ( - "-ss {startSec} " - "-t {durationSec} " - "-i {mediaPath} " - ).format(**input_format) + # Store filters to a file (separated by ',') + # - this is to avoid "too long" command issue in ffmpeg + with tempfile.NamedTemporaryFile( + delete=False, mode="w", suffix=".txt" + ) as tmp_file: + filters_tmp_filepath = tmp_file.name + tmp_file.write(",".join(filters)) - _filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format( - 
**input_format) - _channels += "[r{}]".format(index) + args = [self.ffmpeg_path] + args.extend(input_args) + args.extend([ + "-filter_complex_script", filters_tmp_filepath, + "-map", "[a]" + ]) + args.append(audio_temp_fpath) - # merge all cmd segments together - cmd = _inputs + _filters + _channels - cmd += str( - "amix=inputs={inputs}:duration=first:" - "dropout_transition=1000,volume={inputs}[a]\" " - ).format(inputs=len(inputs)) - cmd += "-map \"[a]\" " + # run subprocess + self.log.debug("Executing: {}".format(args)) + openpype.api.run_subprocess(args, logger=self.log) - return cmd + os.remove(filters_tmp_filepath) def create_temp_file(self, name): """Create temp wav file From 26572719c9eb82dc6f818665c2544ef376d6769a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 8 Aug 2022 17:01:40 +0100 Subject: [PATCH 721/785] Added FBX support for update in reference loader --- openpype/hosts/maya/api/plugin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 9280805945..2b0c6131b4 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -208,7 +208,8 @@ class ReferenceLoader(Loader): file_type = { "ma": "mayaAscii", "mb": "mayaBinary", - "abc": "Alembic" + "abc": "Alembic", + "fbx": "fbx" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation From ab810691c5d4d9dc3bc314a0b6ce482260d1a4ee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 8 Aug 2022 22:34:57 +0200 Subject: [PATCH 722/785] nuke: wrong key name in settings for write node type --- openpype/hosts/nuke/api/lib.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 501ab4ba93..c1f49cbf8c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -912,7 +912,7 @@ def get_render_path(node): avalon_knob_data = read_avalon_data(node) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -1920,7 +1920,7 @@ class WorkfileSettings(object): families.append(avalon_knob_data.get("families")) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -2219,7 +2219,7 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) From 61457bffde96102079c3ccfb83b9a201a3ea4b8d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:19:12 +0800 Subject: [PATCH 723/785] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 9974f97f1b..ed8ada3c62 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,17 @@ class ExtractLook(openpype.api.Extractor): # node 
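Instead of assembling one long `-filter_complex` string, the audio mixer now delays each input with `adelay`, merges them with `amix`, and writes the whole filter graph to a temporary file passed via `-filter_complex_script`, avoiding command-line length limits. A condensed sketch of assembling those arguments; the input dictionaries mirror the structure used above, the file names are hypothetical, and the command is only printed rather than executed.

import tempfile


def build_mix_args(audio_inputs, output_path, ffmpeg_path="ffmpeg"):
    """Build ffmpeg args that mix delayed audio inputs into one file."""
    longest_input = max(i["durationSec"] for i in audio_inputs)

    input_args = []
    filters = []
    tag_names = []
    for index, audio_input in enumerate(audio_inputs):
        input_args.extend([
            "-ss", str(audio_input["startSec"]),
            "-t", str(audio_input["durationSec"]),
            "-i", audio_input["mediaPath"],
        ])
        tag_name = "[r{}]".format(index)
        tag_names.append(tag_name)
        # Shift each input to its place on the timeline.
        filters.append("[{}]adelay={}:all=1{}".format(
            index, audio_input["delayMilSec"], tag_name
        ))

    # Mix all delayed streams; volume compensates amix's per-input attenuation.
    filters.append(
        "{}amix=inputs={}:duration=first:dropout_transition={},volume={}[a]".format(
            "".join(tag_names), len(audio_inputs),
            int(longest_input * 1000) + 1000, len(audio_inputs)
        )
    )

    # The filter graph goes to a side file to avoid "command too long" issues.
    with tempfile.NamedTemporaryFile(delete=False, mode="w", suffix=".txt") as tmp:
        tmp.write(",".join(filters))
        script_path = tmp.name

    args = [ffmpeg_path]
    args.extend(input_args)
    args.extend(["-filter_complex_script", script_path, "-map", "[a]", output_path])
    return args


if __name__ == "__main__":
    inputs = [
        {"mediaPath": "empty.wav", "startSec": 0.0, "durationSec": 10.0, "delayMilSec": 0},
        {"mediaPath": "clip_a.wav", "startSec": 2.0, "durationSec": 3.0, "delayMilSec": 1000},
    ]
    print(" ".join(build_mix_args(inputs, "audio_mix.wav")))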
doesn't have color space attribute color_space = "Raw" else: - color_space = "Raw" + #get all the resolved files + src = files_metadata.get(source) + if src: + if files_metadata[source]["color_space"] == "Raw": + # set color space to raw if we linearized it + color_space = "Raw" + else: + # if the files are unresolved + if files_metadata[filepath]["color_space"] == "Raw": + # set color space to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From de84296711bf8420850af5b065c328c55a2c7a27 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:20:25 +0800 Subject: [PATCH 724/785] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ed8ada3c62..d69eaffe59 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - #get all the resolved files + # get all the resolved files src = files_metadata.get(source) if src: if files_metadata[source]["color_space"] == "Raw": From cb5dd41fba13c7f8e6a7fd62067d4bdddee46f66 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:43:01 +0800 Subject: [PATCH 725/785] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index d69eaffe59..80d82a4f58 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files + # get all the resolved files in Maya File Path Editor src = files_metadata.get(source) if src: if files_metadata[source]["color_space"] == "Raw": From b570374264f0a7cda4f5b4dc15f3c048a675548e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 9 Aug 2022 08:28:26 +0000 Subject: [PATCH 726/785] [Automated] Bump version --- CHANGELOG.md | 21 +++++++++++++-------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 15a120ec2a..788c915b9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,20 +1,29 @@ # Changelog -## [3.12.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.13.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) +**🆕 New features** + +- Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) + **🚀 Enhancements** +- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) - Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) - Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) - General: Add context to pyblish context 
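The UDIM failures appear to come down to `files_metadata` being keyed either by the resolved source path or by the original attribute value (for patterns such as `<UDIM>` tiles), so the look extractor now tries the resolved path and falls back to the raw file path. A small sketch of that two-key lookup; the helper name, return value and sample paths are my own illustration, not the plugin's API.

def linearized_to_raw(files_metadata, source, filepath):
    """Return True when the texture was linearized and must stay tagged Raw.

    The metadata may be keyed by the resolved source path or, for patterns
    such as <UDIM> tiles, by the original attribute value, so both keys are
    tried before giving up.
    """
    meta = files_metadata.get(source) or files_metadata.get(filepath)
    if not meta:
        return False
    return meta.get("color_space") == "Raw"


if __name__ == "__main__":
    metadata = {"/textures/diffuse.<UDIM>.tx": {"color_space": "Raw"}}
    print(linearized_to_raw(
        metadata,
        "/textures/diffuse.1001.tx",    # resolved tile, not a key here
        "/textures/diffuse.<UDIM>.tx",  # original attribute value
    ))  # True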
[\#3594](https://github.com/pypeclub/OpenPype/pull/3594) - Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) - Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) +- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) - Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) - General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) - Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) @@ -23,6 +32,7 @@ - Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) - Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. [\#3590](https://github.com/pypeclub/OpenPype/pull/3590) +- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) - Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) @@ -32,8 +42,8 @@ **🔀 Refactored code** - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) -- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** @@ -60,7 +70,6 @@ - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) -- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) **🐛 Bug fixes** @@ -84,13 +93,13 @@ **🔀 Refactored code** +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) - General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) - General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) - General: Move load 
related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) - General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) -- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) **Merged pull requests:** @@ -100,10 +109,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) -**🚀 Enhancements** - -- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) - ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) diff --git a/openpype/version.py b/openpype/version.py index 3f1056249a..5dc4c0be8a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.3-nightly.3" +__version__ = "3.13.0-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 31a6505280..13a7609920 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.3-nightly.3" # OpenPype +version = "3.13.0-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From e595dbba85733664544c4073f92fde1a1063b68f Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 9 Aug 2022 08:39:56 +0000 Subject: [PATCH 727/785] [Automated] Release --- CHANGELOG.md | 7 ++++--- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 788c915b9d..3124201758 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.13.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) **🆕 New features** @@ -44,6 +44,7 @@ - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** @@ -88,12 +89,12 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** -- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: 
Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) - General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) diff --git a/openpype/version.py b/openpype/version.py index 5dc4c0be8a..d2eb3a8ab6 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.0-nightly.1" +__version__ = "3.13.0" diff --git a/pyproject.toml b/pyproject.toml index 13a7609920..03922a8e67 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.0-nightly.1" # OpenPype +version = "3.13.0" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 9427d791ea3536dda99e591280cc415969f1e3c1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:19:08 +0200 Subject: [PATCH 728/785] moved workfile path resolving into openpype/pipeline/workfile --- openpype/pipeline/workfile/__init__.py | 14 ++ openpype/pipeline/workfile/path_resolving.py | 184 +++++++++++++++++++ 2 files changed, 198 insertions(+) create mode 100644 openpype/pipeline/workfile/__init__.py create mode 100644 openpype/pipeline/workfile/path_resolving.py diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py new file mode 100644 index 0000000000..3a51491cdd --- /dev/null +++ b/openpype/pipeline/workfile/__init__.py @@ -0,0 +1,14 @@ +from .path_resolving import ( + get_workfile_template_key_from_context, + get_workfile_template_key, + get_workdir_with_workdir_data, + get_workdir, +) + + +__all__ = ( + "get_workfile_template_key_from_context", + "get_workfile_template_key", + "get_workdir_with_workdir_data", + "get_workdir", +) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py new file mode 100644 index 0000000000..9525dd59dc --- /dev/null +++ b/openpype/pipeline/workfile/path_resolving.py @@ -0,0 +1,184 @@ +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import filter_profiles +from openpype.pipeline import Anatomy +from openpype.pipeline.template_data import get_template_data + + +def get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings=None +): + """Helper function to get template key for workfile template. + + Do the same as `get_workfile_template_key` but returns value for "session + context". + + It is required to pass one of 'dbcon' with already set project name or + 'project_name' arguments. + + Args: + asset_name(str): Name of asset document. + task_name(str): Task name for which is template key retrieved. + Must be available on asset document under `data.tasks`. + host_name(str): Name of host implementation for which is workfile + used. + project_name(str): Project name where asset and task is. Not required + when 'dbcon' is passed. + project_settings(Dict[str, Any]): Project settings for passed + 'project_name'. Not required at all but makes function faster. 
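+
+    Example:
+        Minimal illustrative call; the asset, task, host and project names
+        used here are hypothetical placeholders::
+
+            template_key = get_workfile_template_key_from_context(
+                "sh010", "compositing", "nuke", project_name="demo_project"
+            )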
+ """ + + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.tasks"] + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) + + +def get_workfile_template_key( + task_type, host_name, project_name, project_settings=None +): + """Workfile template key which should be used to get workfile template. + + Function is using profiles from project settings to return right template + for passet task type and host name. + + Args: + task_type(str): Name of task type. + host_name(str): Name of host implementation (e.g. "maya", "nuke", ...) + project_name(str): Name of project in which context should look for + settings. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. + """ + + default = "work" + if not task_type or not host_name: + return default + + if not project_settings: + project_settings = get_project_settings(project_name) + + try: + profiles = ( + project_settings + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_template_profiles"] + ) + except Exception: + profiles = [] + + if not profiles: + return default + + profile_filter = { + "task_types": task_type, + "hosts": host_name + } + profile = filter_profiles(profiles, profile_filter) + if profile: + return profile["workfile_template"] or default + return default + + +def get_workdir_with_workdir_data( + workdir_data, + project_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + It is possible to pass only project's name instead of project's anatomy but + one of them **must** be entered. It is preferred to enter anatomy if is + available as initialization of a new Anatomy object may be time consuming. + + Args: + workdir_data (Dict[str, Any]): Data to fill workdir template. + project_name (str): Project's name. + otherwise Anatomy object is created with using the project name. + anatomy (Anatomy): Anatomy object for specific project. Faster + processing if is passed. + template_key (str): Key of work templates in anatomy templates. If not + passed `get_workfile_template_key_from_context` is used to get it. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. + """ + + if not anatomy: + anatomy = Anatomy(project_name) + + if not template_key: + template_key = get_workfile_template_key( + workdir_data["task"]["type"], + workdir_data["app"], + workdir_data["project"]["name"], + project_settings + ) + + anatomy_filled = anatomy.format(workdir_data) + # Output is TemplateResult object which contain useful data + output = anatomy_filled[template_key]["folder"] + if output: + return output.normalized() + return output + + +def get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + Args: + project_doc (Dict[str, Any]): Mongo document of project from MongoDB. + asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. + task_name (str): Task name for which are workdir data preapred. + host_name (str): Host which is used to workdir. 
This is required + because workdir template may contain `{app}` key. In `Session` + is stored under `AVALON_APP` key. + anatomy (Anatomy): Optional argument. Anatomy object is created using + project name from `project_doc`. It is preferred to pass this + argument as initialization of a new Anatomy object may be time + consuming. + template_key (str): Key of work templates in anatomy templates. Default + value is defined in `get_workdir_with_workdir_data`. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. + """ + + if not anatomy: + anatomy = Anatomy(project_doc["name"]) + + workdir_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # Output is TemplateResult object which contain useful data + return get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + template_key, + project_settings + ) From fabec0819beeab79cf1695d164420896254d750c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:19:29 +0200 Subject: [PATCH 729/785] maked moved functions as deprecated --- openpype/lib/avalon_context.py | 100 +++++++++++---------------------- 1 file changed, 32 insertions(+), 68 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 42854f39d6..636806d1f4 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -321,6 +321,8 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key_from_context") def get_workfile_template_key_from_context( asset_name, task_name, host_name, project_name=None, dbcon=None, project_settings=None @@ -349,27 +351,26 @@ def get_workfile_template_key_from_context( ValueError: When both 'dbcon' and 'project_name' were not passed. """ + + from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context + ) + if not project_name: if not dbcon: raise ValueError(( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." )) - project_name = dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - return get_workfile_template_key( - task_type, host_name, project_name, project_settings + return get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key") def get_workfile_template_key( task_type, host_name, project_name=None, project_settings=None ): @@ -393,40 +394,12 @@ def get_workfile_template_key( ValueError: When both 'project_name' and 'project_settings' were not passed. """ - default = "work" - if not task_type or not host_name: - return default - if not project_settings: - if not project_name: - raise ValueError(( - "`get_workfile_template_key` requires to pass" - " one of 'project_name' or 'project_settings' arguments." 
- )) - project_settings = get_project_settings(project_name) + from openpype.pipeline.workfile import get_workfile_template_key - try: - profiles = ( - project_settings - ["global"] - ["tools"] - ["Workfiles"] - ["workfile_template_profiles"] - ) - except Exception: - profiles = [] - - if not profiles: - return default - - profile_filter = { - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles(profiles, profile_filter) - if profile: - return profile["workfile_template"] or default - return default + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) @deprecated("openpype.pipeline.template_data.get_template_data") @@ -454,6 +427,7 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir_with_workdir_data( workdir_data, anatomy=None, project_name=None, template_key=None ): @@ -480,31 +454,24 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. """ + if not anatomy and not project_name: raise ValueError(( "Missing required arguments one of `project_name` or `anatomy`" " must be entered." )) - if not anatomy: - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + if not project_name: + project_name = anatomy.project_name - if not template_key: - template_key = get_workfile_template_key( - workdir_data["task"]["type"], - workdir_data["app"], - project_name=workdir_data["project"]["name"] - ) + from openpype.pipeline.workfile import get_workdir_with_workdir_data - anatomy_filled = anatomy.format(workdir_data) - # Output is TemplateResult object which contain useful data - output = anatomy_filled[template_key]["folder"] - if output: - return output.normalized() - return output + return get_workdir_with_workdir_data( + workdir_data, project_name, anatomy, template_key + ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir( project_doc, asset_doc, @@ -533,18 +500,15 @@ def get_workdir( TemplateResult: Workdir path. """ - from openpype.pipeline import Anatomy - from openpype.pipeline.template_data import get_template_data - - if not anatomy: - anatomy = Anatomy(project_doc["name"]) - - workdir_data = get_template_data( - project_doc, asset_doc, task_name, host_name - ) + from openpype.pipeline.workfile import get_workdir # Output is TemplateResult object which contain useful data - return get_workdir_with_workdir_data( - workdir_data, anatomy, template_key=template_key + return get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + template_key ) From 97d55eb335e417102c519d10f280a28afb3275c4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:39:17 +0200 Subject: [PATCH 730/785] modified docstrings --- openpype/pipeline/workfile/path_resolving.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 9525dd59dc..07a814f616 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -13,17 +13,13 @@ def get_workfile_template_key_from_context( Do the same as `get_workfile_template_key` but returns value for "session context". - It is required to pass one of 'dbcon' with already set project name or - 'project_name' arguments. - Args: asset_name(str): Name of asset document. 
task_name(str): Task name for which is template key retrieved. Must be available on asset document under `data.tasks`. host_name(str): Name of host implementation for which is workfile used. - project_name(str): Project name where asset and task is. Not required - when 'dbcon' is passed. + project_name(str): Project name where asset and task is. project_settings(Dict[str, Any]): Project settings for passed 'project_name'. Not required at all but makes function faster. """ @@ -104,7 +100,6 @@ def get_workdir_with_workdir_data( Args: workdir_data (Dict[str, Any]): Data to fill workdir template. project_name (str): Project's name. - otherwise Anatomy object is created with using the project name. anatomy (Anatomy): Anatomy object for specific project. Faster processing if is passed. template_key (str): Key of work templates in anatomy templates. If not From c4a932d3e2cf989b7f98e7d309b6368049619679 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 12:17:42 +0200 Subject: [PATCH 731/785] Refactor `get_output_link_versions` to query `data.inputLinks.id` instead of `data.inputLinks.input` --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index dd5d831ecf..326c8a58a9 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -819,7 +819,7 @@ def get_output_link_versions(project_name, version_id, fields=None): # Does make sense to look for hero versions? query_filter = { "type": "version", - "data.inputLinks.input": version_id + "data.inputLinks.id": version_id } return conn.find(query_filter, _prepare_fields(fields)) From 48c94ea22b0f53108d3023f48bd3c681b108b60d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:29:30 +0200 Subject: [PATCH 732/785] added operations for workfile info --- openpype/client/operations.py | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 69d1eb2bb6..c4b95bf696 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -17,6 +17,7 @@ CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" +CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" def _create_or_convert_to_mongo_id(mongo_id): @@ -188,6 +189,38 @@ def new_representation_doc( } +def new_workfile_info_doc( + filename, asset_id, task_name, files, data=None, entity_id=None +): + """Create skeleton data of workfile info document. + + Workfile document is at this moment used primarily for artist notes. + + Args: + filename (str): Filename of workfile. + asset_id (Union[str, ObjectId]): Id of asset under which workfile live. + task_name (str): Task under which was workfile created. + files (List[str]): List of rootless filepaths related to workfile. + data (Dict[str, Any]): Additional metadata. + + Returns: + Dict[str, Any]: Skeleton of workfile info document. 
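+
+    Example:
+        Illustrative sketch only; the filename, id and path values below
+        are made-up placeholders::
+
+            doc = new_workfile_info_doc(
+                "sh010_compositing_v001.ma",
+                "507f1f77bcf86cd799439011",
+                "compositing",
+                ["{root[work]}/demo/sh010/work/sh010_compositing_v001.ma"]
+            )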
+ """ + + if not data: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "workfile", + "parent": ObjectId(asset_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + def _prepare_update_data(old_doc, new_doc, replace): changes = {} for key, value in new_doc.items(): @@ -243,6 +276,20 @@ def prepare_representation_update_data(old_doc, new_doc, replace=True): return _prepare_update_data(old_doc, new_doc, replace) +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + @six.add_metaclass(ABCMeta) class AbstractOperation(object): """Base operation class. From adcc7010c2f84e2cd6edc2fe01065082cb63f8ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:31:08 +0200 Subject: [PATCH 733/785] workfiles tool use operations session to create workfile info documents --- openpype/tools/workfiles/window.py | 69 +++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 21 deletions(-) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 0b0d67e589..de42b80d64 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -1,18 +1,20 @@ import os import datetime +import copy from Qt import QtCore, QtWidgets, QtGui from openpype.client import ( - get_asset_by_id, get_asset_by_name, get_workfile_info, ) +from openpype.client.operations import ( + OperationsSession, + new_workfile_info_doc, + prepare_workfile_info_update_data, +) from openpype import style from openpype import resources -from openpype.lib import ( - create_workfile_doc, - save_workfile_data_to_doc, -) +from openpype.pipeline import Anatomy from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -324,10 +326,23 @@ class Window(QtWidgets.QWidget): workfile_doc, data = self.side_panel.get_workfile_data() if not workfile_doc: filepath = self.files_widget._get_selected_filepath() - self._create_workfile_doc(filepath, force=True) - workfile_doc = self._get_current_workfile_doc() + workfile_doc = self._create_workfile_doc(filepath) - save_workfile_data_to_doc(workfile_doc, data, legacy_io) + new_workfile_doc = copy.deepcopy(workfile_doc) + new_workfile_doc["data"] = data + update_data = prepare_workfile_info_update_data( + workfile_doc, new_workfile_doc + ) + if not update_data: + return + + project_name = legacy_io.active_project() + + session = OperationsSession() + session.update_entity( + project_name, "workfile", workfile_doc["_id"], update_data + ) + session.commit() def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -343,20 +358,32 @@ class Window(QtWidgets.QWidget): project_name, asset_id, task_name, filename ) - def _create_workfile_doc(self, filepath, force=False): - workfile_doc = None - if not force: - workfile_doc = self._get_current_workfile_doc(filepath) + def _create_workfile_doc(self, filepath): + workfile_doc = self._get_current_workfile_doc(filepath) + if workfile_doc: + return workfile_doc - if not workfile_doc: - workdir, filename = os.path.split(filepath) - asset_id = 
self.assets_widget.get_selected_asset_id() - project_name = legacy_io.active_project() - asset_doc = get_asset_by_id(project_name, asset_id) - task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc( - asset_doc, task_name, filename, workdir, legacy_io - ) + workdir, filename = os.path.split(filepath) + + project_name = legacy_io.active_project() + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + + anatomy = Anatomy(project_name) + success, rootless_dir = anatomy.find_root_template_from_path(workdir) + filepath = "/".join([ + os.path.normpath(rootless_dir).replace("\\", "/"), + filename + ]) + + workfile_doc = new_workfile_info_doc( + filename, asset_id, task_name, [filepath] + ) + + session = OperationsSession() + session.create_entity(project_name, "workfile", workfile_doc) + session.commit() + return workfile_doc def refresh(self): # Refresh asset widget From c64578684d4d280121c30d402815934c54af6683 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:31:26 +0200 Subject: [PATCH 734/785] marked create and update workfile doc functions as deprecated --- openpype/lib/avalon_context.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 636806d1f4..c341b35b71 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -670,7 +670,6 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): return changes -@with_pipeline_io @deprecated("openpype.client.get_workfile_info") def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -691,13 +690,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io project_name = dbcon.active_project() return get_workfile_info(project_name, asset_id, task_name, filename) -@with_pipeline_io +@deprecated def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. 
@@ -718,6 +718,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Filter of workfile document @@ -764,7 +765,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_pipeline_io +@deprecated def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -775,6 +776,7 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Convert data to mongo modification keys/values From b89e99e8905a91deda2211138570978023c3e26e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:47:29 +0200 Subject: [PATCH 735/785] change imports of 'get_workfile_template_key', 'get_workfile_template_key_from_context' and 'get_workdir_with_workdir_data' and 'get_workdir' in code --- .../plugins/publish/integrate_batch_group.py | 10 +++++++-- .../tvpaint/plugins/load/load_workfile.py | 7 +++--- openpype/lib/applications.py | 22 +++++++++++++------ .../action_fill_workfile_attr.py | 11 +++++----- openpype/tools/workfiles/files_widget.py | 3 ++- 5 files changed, 35 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index b59107f155..4d45f67ded 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -3,9 +3,9 @@ import copy from collections import OrderedDict from pprint import pformat import pyblish -from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline +from openpype.pipeline.workfile import get_workdir class IntegrateBatchGroup(pyblish.api.InstancePlugin): @@ -324,7 +324,13 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] anatomy = instance.context.data["anatomy"] + project_settings = instance.context.data["project_settings"] return get_workdir( - project_doc, asset_entity, task_data["name"], "flame", anatomy + project_doc, + asset_entity, + task_data["name"], + "flame", + anatomy, + project_settings=project_settings ) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 8b09d20755..40ce972a09 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -2,7 +2,6 @@ import os from openpype.lib import ( StringTemplate, - get_workfile_template_key_from_context, get_last_workfile_with_version, ) from openpype.pipeline import ( @@ -10,6 +9,9 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context, +) from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -57,8 +59,7 @@ class LoadWorkfile(plugin.Loader): asset_name, task_name, host_name, - project_name=project_name, - dbcon=legacy_io + project_name=project_name ) anatomy = Anatomy(project_name) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index da8623ea13..f1ddae6063 100644 --- a/openpype/lib/applications.py +++ 
b/openpype/lib/applications.py @@ -27,11 +27,7 @@ from openpype.settings.constants import ( from . import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import ( - get_workdir_with_workdir_data, - get_workfile_template_key, - get_last_workfile -) +from .avalon_context import get_last_workfile from .python_module_tools import ( modules_from_path, @@ -1635,7 +1631,14 @@ def prepare_context_environments(data, env_group=None): data["task_type"] = task_type try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + from openpype.pipeline.workfile import get_workdir_with_workdir_data + + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) except Exception as exc: raise ApplicationLaunchFailed( @@ -1725,11 +1728,16 @@ def _prepare_last_workfile(data, workdir): if not last_workfile_path: extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: + from openpype.pipeline import get_workfile_template_key + anatomy = data["anatomy"] project_settings = data["project_settings"] task_type = workdir_data["task"]["type"] template_key = get_workfile_template_key( - task_type, app.host_name, project_settings=project_settings + task_type, + app.host_name, + project_name, + project_settings=project_settings ) # Find last workfile file_template = str(anatomy.templates[template_key]["file"]) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index c7fa2dce5e..fb1cdf340e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -12,12 +12,10 @@ from openpype.client import ( get_assets, ) from openpype.settings import get_project_settings, get_system_settings -from openpype.lib import ( - get_workfile_template_key, - StringTemplate, -) +from openpype.lib import StringTemplate from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile import get_workfile_template_key from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks @@ -299,7 +297,10 @@ class FillWorkfileAttributeAction(BaseAction): task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( - task_type, host_name, project_settings=project_settings + task_type, + host_name, + project_name, + project_settings=project_settings ) if template_key in templates_by_key: template = templates_by_key[template_key] diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 34692b7102..a4109c511e 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -12,7 +12,6 @@ from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( emit_event, - get_workfile_template_key, create_workdir_extra_folders, ) from openpype.lib.avalon_context import ( @@ -24,6 +23,8 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.workfile import get_workfile_template_key + from .model import ( WorkAreaFilesModel, PublishFilesModel, From 02007784faa52417e2e8bd9381dd4d7b523f1e1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: 
Tue, 9 Aug 2022 12:56:50 +0200 Subject: [PATCH 736/785] moved 'get_last_workfile_with_version' and 'get_last_workfile' to 'openpype.pipeline.workfile' --- .../tvpaint/plugins/load/load_workfile.py | 6 +- openpype/lib/applications.py | 6 +- openpype/lib/avalon_context.py | 92 ++---------- openpype/pipeline/workfile/__init__.py | 6 + openpype/pipeline/workfile/path_resolving.py | 131 +++++++++++++++++- openpype/tools/workfiles/save_as_dialog.py | 2 +- 6 files changed, 153 insertions(+), 90 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 40ce972a09..a99b300730 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,9 +1,6 @@ import os -from openpype.lib import ( - StringTemplate, - get_last_workfile_with_version, -) +from openpype.lib import StringTemplate from openpype.pipeline import ( registered_host, legacy_io, @@ -11,6 +8,7 @@ from openpype.pipeline import ( ) from openpype.pipeline.workfile import ( get_workfile_template_key_from_context, + get_last_workfile_with_version, ) from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index f1ddae6063..8c92665366 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -27,7 +27,6 @@ from openpype.settings.constants import ( from . import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import get_last_workfile from .python_module_tools import ( modules_from_path, @@ -1728,7 +1727,10 @@ def _prepare_last_workfile(data, workdir): if not last_workfile_path: extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: - from openpype.pipeline import get_workfile_template_key + from openpype.pipeline.workfile import ( + get_workfile_template_key, + get_last_workfile + ) anatomy = data["anatomy"] project_settings = data["project_settings"] diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index c341b35b71..a2a1839218 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1696,6 +1696,7 @@ def get_custom_workfile_template(template_profiles): ) +@deprecated("openpype.pipeline.workfile.get_last_workfile_with_version") def get_last_workfile_with_version( workdir, file_template, fill_data, extensions ): @@ -1711,78 +1712,15 @@ def get_last_workfile_with_version( tuple: Last workfile with version if there is any otherwise returns (None, None). """ - if not os.path.exists(workdir): - return None, None - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions - ] + from openpype.pipeline.workfile import get_last_workfile_with_version - # Build template without optionals, version to digits only regex - # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." 
+ ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" - - # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end - file_template = re.sub(r"\.?{ext}", ext_expression, file_template) - # Replace optional keys with optional content regex - file_template = re.sub(r"<.*?>", r".*?", file_template) - # Replace `{version}` with group regex - file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) - file_template = re.sub(r"{comment.*?}", r".+?", file_template) - file_template = StringTemplate.format_strict_template( - file_template, fill_data + return get_last_workfile_with_version( + workdir, file_template, fill_data, extensions ) - # Match with ignore case on Windows due to the Windows - # OS not being case-sensitive. This avoids later running - # into the error that the file did exist if it existed - # with a different upper/lower-case. - kwargs = {} - if platform.system().lower() == "windows": - kwargs["flags"] = re.IGNORECASE - - # Get highest version among existing matching files - version = None - output_filenames = [] - for filename in sorted(filenames): - match = re.match(file_template, filename, **kwargs) - if not match: - continue - - file_version = int(match.group(1)) - if version is None or file_version > version: - output_filenames[:] = [] - version = file_version - - if file_version == version: - output_filenames.append(filename) - - output_filename = None - if output_filenames: - if len(output_filenames) == 1: - output_filename = output_filenames[0] - else: - last_time = None - for _output_filename in output_filenames: - full_path = os.path.join(workdir, _output_filename) - mod_time = os.path.getmtime(full_path) - if last_time is None or last_time < mod_time: - output_filename = _output_filename - last_time = mod_time - - return output_filename, version - +@deprecated("openpype.pipeline.workfile.get_last_workfile") def get_last_workfile( workdir, file_template, fill_data, extensions, full_path=False ): @@ -1800,22 +1738,12 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. 
""" - filename, version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + + from openpype.pipeline.workfile import get_last_workfile + + return get_last_workfile( + workdir, file_template, fill_data, extensions, full_path ) - if filename is None: - data = copy.deepcopy(fill_data) - data["version"] = 1 - data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] - data["ext"] = data["ext"].replace('.', '') - filename = StringTemplate.format_strict_template(file_template, data) - - if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename @with_pipeline_io diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 3a51491cdd..dc4955f7af 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -3,6 +3,9 @@ from .path_resolving import ( get_workfile_template_key, get_workdir_with_workdir_data, get_workdir, + + get_last_workfile_with_version, + get_last_workfile, ) @@ -11,4 +14,7 @@ __all__ = ( "get_workfile_template_key", "get_workdir_with_workdir_data", "get_workdir", + + "get_last_workfile_with_version", + "get_last_workfile", ) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 07a814f616..7362902bcd 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -1,6 +1,11 @@ +import os +import re +import copy +import platform + from openpype.client import get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import filter_profiles +from openpype.lib import filter_profiles, StringTemplate from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data @@ -177,3 +182,127 @@ def get_workdir( template_key, project_settings ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(Dict[str, Any]): Data for filling template. + extensions(Iterable[str]): All allowed file extensions of workfile. + + Returns: + Tuple[Union[str, None], Union[int, None]]: Last workfile with version + if there is any workfile otherwise None for both. + """ + + if not os.path.exists(workdir): + return None, None + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[1] in extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + _ext = [] + for ext in extensions: + if not ext.startswith("."): + ext = "." + ext + # Escape dot for regex + ext = "\\" + ext + _ext.append(ext) + ext_expression = "(?:" + "|".join(_ext) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + file_template = StringTemplate.format_strict_template( + file_template, fill_data + ) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. 
This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. + kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Get highest version among existing matching files + version = None + output_filenames = [] + for filename in sorted(filenames): + match = re.match(file_template, filename, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version > version: + output_filenames[:] = [] + version = file_version + + if file_version == version: + output_filenames.append(filename) + + output_filename = None + if output_filenames: + if len(output_filenames) == 1: + output_filename = output_filenames[0] + else: + last_time = None + for _output_filename in output_filenames: + full_path = os.path.join(workdir, _output_filename) + mod_time = os.path.getmtime(full_path) + if last_time is None or last_time < mod_time: + output_filename = _output_filename + last_time = mod_time + + return output_filename, version + + +def get_last_workfile( + workdir, file_template, fill_data, extensions, full_path=False +): + """Return last workfile filename. + + Returns file with version 1 if there is not workfile yet. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(Dict[str, Any]): Data for filling template. + extensions(Iterable[str]): All allowed file extensions of workfile. + full_path(bool): Full path to file is returned if set to True. + + Returns: + str: Last or first workfile as filename of full path to filename. + """ + + filename, version = get_last_workfile_with_version( + workdir, file_template, fill_data, extensions + ) + if filename is None: + data = copy.deepcopy(fill_data) + data["version"] = 1 + data.pop("comment", None) + if not data.get("ext"): + data["ext"] = extensions[0] + data["ext"] = data["ext"].replace('.', '') + filename = StringTemplate.format_strict_template(file_template, data) + + if full_path: + return os.path.normpath(os.path.join(workdir, filename)) + + return filename diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index ea602846e7..cded4eb1a5 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,11 +5,11 @@ import logging from Qt import QtWidgets, QtCore -from openpype.lib import get_last_workfile_with_version from openpype.pipeline import ( registered_host, legacy_io, ) +from openpype.pipeline.workfile import get_last_workfile_with_version from openpype.pipeline.template_data import get_template_data_with_names from openpype.tools.utils import PlaceholderLineEdit From bf463afc41abcb4afd25006422b17d940aee1300 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 13:50:16 +0200 Subject: [PATCH 737/785] moved 'get_workdir_from_session' to context tools --- .../fusion/scripts/fusion_switch_shot.py | 2 +- .../hosts/fusion/utility_scripts/switch_ui.py | 2 +- openpype/lib/avalon_context.py | 27 +++----------- openpype/pipeline/context_tools.py | 35 +++++++++++++++++++ openpype/scripts/fusion_switch_shot.py | 2 +- 5 files changed, 43 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 87ff8e2ffe..49ef340679 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -15,7 
+15,7 @@ from openpype.pipeline import ( from openpype.lib import version_up from openpype.hosts.fusion import api from openpype.hosts.fusion.api import lib -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 01d55db647..93f775b24b 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( legacy_io, ) from openpype.hosts.fusion import api -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Fusion Switch Shot") diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index a2a1839218..1b2ac459a1 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -554,6 +554,8 @@ def compute_session_changes( dict: The required changes in the Session dictionary. """ + from openpype.pipeline.context_tools import get_workdir_from_session + changes = dict() # If no changes, return directly @@ -600,30 +602,11 @@ def compute_session_changes( return changes -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): - from openpype.pipeline import Anatomy - from openpype.pipeline.context_tools import get_template_data_from_session + from openpype.pipeline.context_tools import get_workdir_from_session - if session is None: - session = legacy_io.Session - project_name = session["AVALON_PROJECT"] - host_name = session["AVALON_APP"] - anatomy = Anatomy(project_name) - template_data = get_template_data_from_session(session) - anatomy_filled = anatomy.format(template_data) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - task_type, - host_name, - project_name=project_name - ) - path = anatomy_filled[template_key]["folder"] - if path: - path = os.path.normpath(path) - return path + return get_workdir_from_session(session, template_key) @with_pipeline_io diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index c8c70e5ea8..13185c72b2 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -22,6 +22,7 @@ from openpype.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names +from .workfile import get_workfile_template_key from . import ( legacy_io, register_loader_plugin_path, @@ -377,3 +378,37 @@ def get_template_data_from_session(session=None, system_settings=None): return get_template_data_with_names( project_name, asset_name, task_name, host_name, system_settings ) + + +def get_workdir_from_session(session=None, template_key=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + template_key (str): Prepared template key from which workdir is + calculated. + + Returns: + str: Workdir path. 
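+
+    Example:
+        Rough usage sketch; assumes the global Session (project, asset,
+        task and host keys) is already populated by the launched host::
+
+            workdir = get_workdir_from_session()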
+ """ + + if session is None: + session = legacy_io.Session + project_name = session["AVALON_PROJECT"] + host_name = session["AVALON_APP"] + anatomy = Anatomy(project_name) + template_data = get_template_data_from_session(session) + anatomy_filled = anatomy.format(template_data) + + if not template_key: + task_type = template_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, + host_name, + project_name=project_name + ) + path = anatomy_filled[template_key]["folder"] + if path: + path = os.path.normpath(path) + return path diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 15f189e7cb..fc22f060a2 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -17,7 +17,7 @@ from openpype.pipeline import ( legacy_io, ) -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") From 01d87ba032dc5930526f7740bdcbd4840b9fb508 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 15:10:45 +0200 Subject: [PATCH 738/785] moved build workfile to 'openpype.pipeline.workfile' --- openpype/lib/avalon_context.py | 658 +----------------- openpype/pipeline/workfile/__init__.py | 4 + openpype/pipeline/workfile/build_workfile.py | 693 +++++++++++++++++++ 3 files changed, 701 insertions(+), 654 deletions(-) create mode 100644 openpype/pipeline/workfile/build_workfile.py diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 1b2ac459a1..b32c9bce6d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -777,661 +777,11 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) -class BuildWorkfile: - """Wrapper for build workfile process. +@deprecated("openpype.pipeline.workfile.BuildWorkfile") +def BuildWorkfile(): + from openpype.pipeline.workfile import BuildWorkfile - Load representations for current context by build presets. Build presets - are host related, since each host has it's loaders. - """ - - log = logging.getLogger("BuildWorkfile") - - @staticmethod - def map_subsets_by_family(subsets): - subsets_by_family = collections.defaultdict(list) - for subset in subsets: - family = subset["data"].get("family") - if not family: - families = subset["data"].get("families") - if not families: - continue - family = families[0] - - subsets_by_family[family].append(subset) - return subsets_by_family - - def process(self): - """Main method of this wrapper. - - Building of workfile is triggered and is possible to implement - post processing of loaded containers if necessary. - """ - containers = self.build_workfile() - - return containers - - @with_pipeline_io - def build_workfile(self): - """Prepares and load containers into workfile. - - Loads latest versions of current and linked assets to workfile by logic - stored in Workfile profiles from presets. Profiles are set by host, - filtered by current task name and used by families. - - Each family can specify representation names and loaders for - representations and first available and successful loaded - representation is returned as container. - - At the end you'll get list of loaded containers per each asset. - - loaded_containers [{ - "asset_entity": , - "containers": [, , ...] - }, { - "asset_entity": , - "containers": [, ...] - }, { - ... 
- }] - """ - from openpype.pipeline import discover_loader_plugins - - # Get current asset name and entity - project_name = legacy_io.active_project() - current_asset_name = legacy_io.Session["AVALON_ASSET"] - current_asset_entity = get_asset_by_name( - project_name, current_asset_name - ) - # Skip if asset was not found - if not current_asset_entity: - print("Asset entity with name `{}` was not found".format( - current_asset_name - )) - return - - # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader - - # Skip if there are any loaders - if not loaders_by_name: - self.log.warning("There are no registered loaders.") - return - - # Get current task name - current_task_name = legacy_io.Session["AVALON_TASK"] - - # Load workfile presets for task - self.build_presets = self.get_build_presets( - current_task_name, current_asset_entity - ) - - # Skip if there are any presets for task - if not self.build_presets: - self.log.warning( - "Current task `{}` does not have any loading preset.".format( - current_task_name - ) - ) - return - - # Get presets for loading current asset - current_context_profiles = self.build_presets.get("current_context") - # Get presets for loading linked assets - link_context_profiles = self.build_presets.get("linked_assets") - # Skip if both are missing - if not current_context_profiles and not link_context_profiles: - self.log.warning( - "Current task `{}` has empty loading preset.".format( - current_task_name - ) - ) - return - - elif not current_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any loading" - " preset for it's context." - ).format(current_task_name)) - - elif not link_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any" - "loading preset for it's linked assets." - ).format(current_task_name)) - - # Prepare assets to process by workfile presets - assets = [] - current_asset_id = None - if current_context_profiles: - # Add current asset entity if preset has current context set - assets.append(current_asset_entity) - current_asset_id = current_asset_entity["_id"] - - if link_context_profiles: - # Find and append linked assets if preset has set linked mapping - link_assets = get_linked_assets(current_asset_entity) - if link_assets: - assets.extend(link_assets) - - # Skip if there are no assets. This can happen if only linked mapping - # is set and there are no links for his asset. - if not assets: - self.log.warning( - "Asset does not have linked assets. Nothing to process." 
- ) - return - - # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) - - # Load containers by prepared entities and presets - loaded_containers = [] - # - Current asset containers - if current_asset_id and current_asset_id in prepared_entities: - current_context_data = prepared_entities.pop(current_asset_id) - loaded_data = self.load_containers_by_asset_data( - current_context_data, current_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # - Linked assets container - for linked_asset_data in prepared_entities.values(): - loaded_data = self.load_containers_by_asset_data( - linked_asset_data, link_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # Return list of loaded containers - return loaded_containers - - @with_pipeline_io - def get_build_presets(self, task_name, asset_doc): - """ Returns presets to build workfile for task name. - - Presets are loaded for current project set in - io.Session["AVALON_PROJECT"], filtered by registered host - and entered task name. - - Args: - task_name (str): Task name used for filtering build presets. - - Returns: - (dict): preset per entered task name - """ - host_name = os.environ["AVALON_APP"] - project_settings = get_project_settings( - legacy_io.Session["AVALON_PROJECT"] - ) - - host_settings = project_settings.get(host_name) or {} - # Get presets for host - wb_settings = host_settings.get("workfile_builder") - if not wb_settings: - # backward compatibility - wb_settings = host_settings.get("workfile_build") or {} - - builder_profiles = wb_settings.get("profiles") - if not builder_profiles: - return None - - task_type = ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") - ) - filter_data = { - "task_types": task_type, - "tasks": task_name - } - return filter_profiles(builder_profiles, filter_data) - - def _filter_build_profiles(self, build_profiles, loaders_by_name): - """ Filter build profiles by loaders and prepare process data. - - Valid profile must have "loaders", "families" and "repre_names" keys - with valid values. - - "loaders" expects list of strings representing possible loaders. - - "families" expects list of strings for filtering - by main subset family. - - "repre_names" expects list of strings for filtering by - representation name. - - Lowered "families" and "repre_names" are prepared for each profile with - all required keys. - - Args: - build_profiles (dict): Profiles for building workfile. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list): Filtered and prepared profiles. 
- """ - valid_profiles = [] - for profile in build_profiles: - # Check loaders - profile_loaders = profile.get("loaders") - if not profile_loaders: - self.log.warning(( - "Build profile has missing loaders configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check if any loader is available - loaders_match = False - for loader_name in profile_loaders: - if loader_name in loaders_by_name: - loaders_match = True - break - - if not loaders_match: - self.log.warning(( - "All loaders from Build profile are not available: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check families - profile_families = profile.get("families") - if not profile_families: - self.log.warning(( - "Build profile is missing families configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check representation names - profile_repre_names = profile.get("repre_names") - if not profile_repre_names: - self.log.warning(( - "Build profile is missing" - " representation names filtering: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Prepare lowered families and representation names - profile["families_lowered"] = [ - fam.lower() for fam in profile_families - ] - profile["repre_names_lowered"] = [ - name.lower() for name in profile_repre_names - ] - - valid_profiles.append(profile) - - return valid_profiles - - def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset by it's data. - - Profiles are filtered for each subset individually. - Profile is filtered by subset's family, optionally by name regex and - representation names set in profile. - It is possible to not find matching profile for subset, in that case - subset is skipped and it is possible that none of subsets have - matching profile. - - Args: - subsets (list): Subset documents. - profiles (dict): Build profiles. - - Returns: - (dict) Profile by subset's id. - """ - # Prepare subsets - subsets_by_family = self.map_subsets_by_family(subsets) - - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() - for profile in profiles: - # Skip profile if does not contain family - if family_low not in profile["families_lowered"]: - continue - - # Precompile name filters as regexes - profile_regexes = profile.get("subset_name_filters") - if profile_regexes: - _profile_regexes = [] - for regex in profile_regexes: - _profile_regexes.append(re.compile(regex)) - profile_regexes = _profile_regexes - - # TODO prepare regex compilation - for subset in subsets: - # Verify regex filtering (optional) - if profile_regexes: - valid = False - for pattern in profile_regexes: - if re.match(pattern, subset["name"]): - valid = True - break - - if not valid: - continue - - profiles_per_subset_id[subset["_id"]] = profile - - # break profiles loop on finding the first matching profile - break - return profiles_per_subset_id - - def load_containers_by_asset_data( - self, asset_entity_data, build_profiles, loaders_by_name - ): - """Load containers for entered asset entity by Build profiles. - - Args: - asset_entity_data (dict): Prepared data with subsets, last version - and representations for specific asset. - build_profiles (dict): Build profiles. - loaders_by_name (dict): Available loaders per name. - - Returns: - (dict) Output contains asset document and loaded containers. 
- """ - - # Make sure all data are not empty - if not asset_entity_data or not build_profiles or not loaders_by_name: - return - - asset_entity = asset_entity_data["asset_entity"] - - valid_profiles = self._filter_build_profiles( - build_profiles, loaders_by_name - ) - if not valid_profiles: - self.log.warning( - "There are not valid Workfile profiles. Skipping process." - ) - return - - self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - - subsets_by_id = {} - version_by_subset_id = {} - repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subset_entity = in_data["subset_entity"] - subsets_by_id[subset_entity["_id"]] = subset_entity - - version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = ( - version_data["repres"] - ) - - if not subsets_by_id: - self.log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] - )) - return - - profiles_per_subset_id = self._prepare_profile_for_subsets( - subsets_by_id.values(), valid_profiles - ) - if not profiles_per_subset_id: - self.log.warning("There are not valid subsets.") - return - - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, profile in profiles_per_subset_id.items(): - profile_repre_names = profile["repre_names_lowered"] - - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] - repres = repres_by_version_id[version_id] - for repre in repres: - repre_name_low = repre["name"].lower() - if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) - - # DEBUG message - msg = "Valid representations for Asset: `{}`".format( - asset_entity["name"] - ) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format( - subset["name"], subset_id - ) - for repre in repres: - msg += "\n## Repre name: `{}`".format(repre["name"]) - - self.log.debug(msg) - - containers = self._load_containers( - valid_repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ) - - return { - "asset_entity": asset_entity, - "containers": containers - } - - @with_pipeline_io - def _load_containers( - self, repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ): - """Real load by collected data happens here. - - Loading of representations per subset happens here. Each subset can - loads one representation. Loading is tried in specific order. - Representations are tried to load by names defined in configuration. - If subset has representation matching representation name each loader - is tried to load it until any is successful. If none of them was - successful then next representation name is tried. - Subset process loop ends when any representation is loaded or - all matching representations were already tried. - - Args: - repres_by_subset_id (dict): Available representations mapped - by their parent (subset) id. - subsets_by_id (dict): Subset documents mapped by their id. - profiles_per_subset_id (dict): Build profiles mapped by subset id. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list) Objects of loaded containers. - """ - from openpype.pipeline import ( - IncompatibleLoaderError, - load_container, - ) - - loaded_containers = [] - - # Get subset id order from build presets. 
- build_presets = self.build_presets.get("current_context", []) - build_presets += self.build_presets.get("linked_assets", []) - subset_ids_ordered = [] - for preset in build_presets: - for preset_family in preset["families"]: - for id, subset in subsets_by_id.items(): - if preset_family not in subset["data"].get("families", []): - continue - - subset_ids_ordered.append(id) - - # Order representations from subsets. - print("repres_by_subset_id", repres_by_subset_id) - representations_ordered = [] - representations = [] - for id in subset_ids_ordered: - for subset_id, repres in repres_by_subset_id.items(): - if repres in representations: - continue - - if id == subset_id: - representations_ordered.append((subset_id, repres)) - representations.append(repres) - - print("representations", representations) - - # Load ordered representations. - for subset_id, repres in representations_ordered: - subset_name = subsets_by_id[subset_id]["name"] - - profile = profiles_per_subset_id[subset_id] - loaders_last_idx = len(profile["loaders"]) - 1 - repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 - - repre_by_low_name = { - repre["name"].lower(): repre for repre in repres - } - - is_loaded = False - for repre_name_idx, profile_repre_name in enumerate( - profile["repre_names_lowered"] - ): - # Break iteration if representation was already loaded - if is_loaded: - break - - repre = repre_by_low_name.get(profile_repre_name) - if not repre: - continue - - for loader_idx, loader_name in enumerate(profile["loaders"]): - if is_loaded: - break - - loader = loaders_by_name.get(loader_name) - if not loader: - continue - try: - container = load_container( - loader, - repre["_id"], - name=subset_name - ) - loaded_containers.append(container) - is_loaded = True - - except Exception as exc: - if exc == IncompatibleLoaderError: - self.log.info(( - "Loader `{}` is not compatible with" - " representation `{}`" - ).format(loader_name, repre["name"])) - - else: - self.log.error( - "Unexpected error happened during loading", - exc_info=True - ) - - msg = "Loading failed." - if loader_idx < loaders_last_idx: - msg += " Trying next loader." - elif repre_name_idx < repre_names_last_idx: - msg += ( - " Loading of subset `{}` was not successful." - ).format(subset_name) - else: - msg += " Trying next representation." - self.log.info(msg) - - return loaded_containers - - @with_pipeline_io - def _collect_last_version_repres(self, asset_docs): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... 
- } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - output = {} - if not asset_docs: - return output - - asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} - - project_name = legacy_io.active_project() - subsets = list(get_subsets( - project_name, asset_ids=asset_docs_by_ids.keys() - )) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - last_version_by_subset_id = get_last_versions( - project_name, subset_entity_by_ids.keys() - ) - last_version_docs_by_id = { - version["_id"]: version - for version in last_version_by_subset_id.values() - } - repre_docs = get_representations( - project_name, version_ids=last_version_docs_by_id.keys() - ) - - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = last_version_docs_by_id[version_id] - - subset_id = version_doc["parent"] - subset_doc = subset_entity_by_ids[subset_id] - - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset_doc, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset_doc, - "version": { - "version_entity": version_doc, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre_doc - ) - - return output + return BuildWorkfile() @with_pipeline_io diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index dc4955f7af..3bc125cfc4 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -8,6 +8,8 @@ from .path_resolving import ( get_last_workfile, ) +from .build_workfile import BuildWorkfile + __all__ = ( "get_workfile_template_key_from_context", @@ -17,4 +19,6 @@ __all__ = ( "get_last_workfile_with_version", "get_last_workfile", + + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py new file mode 100644 index 0000000000..bb6fcb4189 --- /dev/null +++ b/openpype/pipeline/workfile/build_workfile.py @@ -0,0 +1,693 @@ +import os +import re +import collections +import json + +from openpype.client import ( + get_asset_by_name, + get_subsets, + get_last_versions, + get_representations, +) +from openpype.settings import get_project_settings +from openpype.lib import ( + get_linked_assets, + filter_profiles, + Logger, +) +from openpype.pipeline import legacy_io +from openpype.pipeline.load import ( + discover_loader_plugins, + IncompatibleLoaderError, + load_container, +) + + +class BuildWorkfile: + """Wrapper for build workfile process. + + Load representations for current context by build presets. Build presets + are host related, since each host has it's loaders. + """ + + _log = None + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @staticmethod + def map_subsets_by_family(subsets): + subsets_by_family = collections.defaultdict(list) + for subset in subsets: + family = subset["data"].get("family") + if not family: + families = subset["data"].get("families") + if not families: + continue + family = families[0] + + subsets_by_family[family].append(subset) + return subsets_by_family + + def process(self): + """Main method of this wrapper. + + Building of workfile is triggered and is possible to implement + post processing of loaded containers if necessary. 
+
+        Returns:
+            List[Dict[str, Any]]: Loaded containers during build.
+        """
+
+        return self.build_workfile()
+
+    def build_workfile(self):
+        """Prepares and loads containers into the workfile.
+
+        Loads latest versions of current and linked assets to workfile by
+        logic stored in Workfile profiles from presets. Profiles are set by
+        host, filtered by current task name and used by families.
+
+        Each family can specify representation names and loaders for
+        representations; the first available and successfully loaded
+        representation is returned as a container.
+
+        At the end you'll get a list of loaded containers per asset.
+
+        loaded_containers [{
+            "asset_entity": <AssetEntity1>,
+            "containers": [<Container1>, <Container2>, ...]
+        }, {
+            "asset_entity": <AssetEntity2>,
+            "containers": [<Container3>, ...]
+        }, {
+            ...
+        }]
+
+        Returns:
+            List[Dict[str, Any]]: Loaded containers during build.
+        """
+
+        loaded_containers = []
+
+        # Get current asset name and entity
+        project_name = legacy_io.active_project()
+        current_asset_name = legacy_io.Session["AVALON_ASSET"]
+        current_asset_entity = get_asset_by_name(
+            project_name, current_asset_name
+        )
+        # Skip if asset was not found
+        if not current_asset_entity:
+            print("Asset entity with name `{}` was not found".format(
+                current_asset_name
+            ))
+            return loaded_containers
+
+        # Prepare available loaders
+        loaders_by_name = {}
+        for loader in discover_loader_plugins():
+            loader_name = loader.__name__
+            if loader_name in loaders_by_name:
+                raise KeyError(
+                    "Duplicated loader name {0}!".format(loader_name)
+                )
+            loaders_by_name[loader_name] = loader
+
+        # Skip if there are no registered loaders
+        if not loaders_by_name:
+            self.log.warning("There are no registered loaders.")
+            return loaded_containers
+
+        # Get current task name
+        current_task_name = legacy_io.Session["AVALON_TASK"]
+
+        # Load workfile presets for task
+        self.build_presets = self.get_build_presets(
+            current_task_name, current_asset_entity
+        )
+
+        # Skip if there are no presets for the task
+        if not self.build_presets:
+            self.log.warning(
+                "Current task `{}` does not have any loading preset.".format(
+                    current_task_name
+                )
+            )
+            return loaded_containers
+
+        # Get presets for loading current asset
+        current_context_profiles = self.build_presets.get("current_context")
+        # Get presets for loading linked assets
+        link_context_profiles = self.build_presets.get("linked_assets")
+        # Skip if both are missing
+        if not current_context_profiles and not link_context_profiles:
+            self.log.warning(
+                "Current task `{}` has empty loading preset.".format(
+                    current_task_name
+                )
+            )
+            return loaded_containers
+
+        elif not current_context_profiles:
+            self.log.warning((
+                "Current task `{}` doesn't have any loading"
+                " preset for its context."
+            ).format(current_task_name))
+
+        elif not link_context_profiles:
+            self.log.warning((
+                "Current task `{}` doesn't have any"
+                " loading preset for its linked assets."
+            ).format(current_task_name))
+
+        # Prepare assets to process by workfile presets
+        assets = []
+        current_asset_id = None
+        if current_context_profiles:
+            # Add current asset entity if preset has current context set
+            assets.append(current_asset_entity)
+            current_asset_id = current_asset_entity["_id"]
+
+        if link_context_profiles:
+            # Find and append linked assets if preset has set linked mapping
+            link_assets = get_linked_assets(current_asset_entity)
+            if link_assets:
+                assets.extend(link_assets)
+
+        # Skip if there are no assets. This can happen if only linked mapping
+        # is set and there are no links for this asset.
+ if not assets: + self.log.warning( + "Asset does not have linked assets. Nothing to process." + ) + return loaded_containers + + # Prepare entities from database for assets + prepared_entities = self._collect_last_version_repres(assets) + + # Load containers by prepared entities and presets + # - Current asset containers + if current_asset_id and current_asset_id in prepared_entities: + current_context_data = prepared_entities.pop(current_asset_id) + loaded_data = self.load_containers_by_asset_data( + current_context_data, current_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # - Linked assets container + for linked_asset_data in prepared_entities.values(): + loaded_data = self.load_containers_by_asset_data( + linked_asset_data, link_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # Return list of loaded containers + return loaded_containers + + def get_build_presets(self, task_name, asset_doc): + """ Returns presets to build workfile for task name. + + Presets are loaded for current project set in + io.Session["AVALON_PROJECT"], filtered by registered host + and entered task name. + + Args: + task_name (str): Task name used for filtering build presets. + + Returns: + Dict[str, Any]: preset per entered task name + """ + + host_name = os.environ["AVALON_APP"] + project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) + + host_settings = project_settings.get(host_name) or {} + # Get presets for host + wb_settings = host_settings.get("workfile_builder") + if not wb_settings: + # backward compatibility + wb_settings = host_settings.get("workfile_build") or {} + + builder_profiles = wb_settings.get("profiles") + if not builder_profiles: + return None + + task_type = ( + asset_doc + .get("data", {}) + .get("tasks", {}) + .get(task_name, {}) + .get("type") + ) + filter_data = { + "task_types": task_type, + "tasks": task_name + } + return filter_profiles(builder_profiles, filter_data) + + def _filter_build_profiles(self, build_profiles, loaders_by_name): + """ Filter build profiles by loaders and prepare process data. + + Valid profile must have "loaders", "families" and "repre_names" keys + with valid values. + - "loaders" expects list of strings representing possible loaders. + - "families" expects list of strings for filtering + by main subset family. + - "repre_names" expects list of strings for filtering by + representation name. + + Lowered "families" and "repre_names" are prepared for each profile with + all required keys. + + Args: + build_profiles (Dict[str, Any]): Profiles for building workfile. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Filtered and prepared profiles. 
+ """ + + valid_profiles = [] + for profile in build_profiles: + # Check loaders + profile_loaders = profile.get("loaders") + if not profile_loaders: + self.log.warning(( + "Build profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check if any loader is available + loaders_match = False + for loader_name in profile_loaders: + if loader_name in loaders_by_name: + loaders_match = True + break + + if not loaders_match: + self.log.warning(( + "All loaders from Build profile are not available: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check families + profile_families = profile.get("families") + if not profile_families: + self.log.warning(( + "Build profile is missing families configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check representation names + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: + self.log.warning(( + "Build profile is missing" + " representation names filtering: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Prepare lowered families and representation names + profile["families_lowered"] = [ + fam.lower() for fam in profile_families + ] + profile["repre_names_lowered"] = [ + name.lower() for name in profile_repre_names + ] + + valid_profiles.append(profile) + + return valid_profiles + + def _prepare_profile_for_subsets(self, subsets, profiles): + """Select profile for each subset by it's data. + + Profiles are filtered for each subset individually. + Profile is filtered by subset's family, optionally by name regex and + representation names set in profile. + It is possible to not find matching profile for subset, in that case + subset is skipped and it is possible that none of subsets have + matching profile. + + Args: + subsets (List[Dict[str, Any]]): Subset documents. + profiles (List[Dict[str, Any]]): Build profiles. + + Returns: + Dict[str, Any]: Profile by subset's id. + """ + + # Prepare subsets + subsets_by_family = self.map_subsets_by_family(subsets) + + profiles_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for profile in profiles: + # Skip profile if does not contain family + if family_low not in profile["families_lowered"]: + continue + + # Precompile name filters as regexes + profile_regexes = profile.get("subset_name_filters") + if profile_regexes: + _profile_regexes = [] + for regex in profile_regexes: + _profile_regexes.append(re.compile(regex)) + profile_regexes = _profile_regexes + + # TODO prepare regex compilation + for subset in subsets: + # Verify regex filtering (optional) + if profile_regexes: + valid = False + for pattern in profile_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + profiles_per_subset_id[subset["_id"]] = profile + + # break profiles loop on finding the first matching profile + break + return profiles_per_subset_id + + def load_containers_by_asset_data( + self, asset_entity_data, build_profiles, loaders_by_name + ): + """Load containers for entered asset entity by Build profiles. + + Args: + asset_entity_data (Dict[str, Any]): Prepared data with subsets, + last versions and representations for specific asset. + build_profiles (Dict[str, Any]): Build profiles. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + Dict[str, Any]: Output contains asset document + and loaded containers. 
+ """ + + # Make sure all data are not empty + if not asset_entity_data or not build_profiles or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + + valid_profiles = self._filter_build_profiles( + build_profiles, loaders_by_name + ) + if not valid_profiles: + self.log.warning( + "There are not valid Workfile profiles. Skipping process." + ) + return + + self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) + + subsets_by_id = {} + version_by_subset_id = {} + repres_by_version_id = {} + for subset_id, in_data in asset_entity_data["subsets"].items(): + subset_entity = in_data["subset_entity"] + subsets_by_id[subset_entity["_id"]] = subset_entity + + version_data = in_data["version"] + version_entity = version_data["version_entity"] + version_by_subset_id[subset_id] = version_entity + repres_by_version_id[version_entity["_id"]] = ( + version_data["repres"] + ) + + if not subsets_by_id: + self.log.warning("There are not subsets for asset {0}".format( + asset_entity["name"] + )) + return + + profiles_per_subset_id = self._prepare_profile_for_subsets( + subsets_by_id.values(), valid_profiles + ) + if not profiles_per_subset_id: + self.log.warning("There are not valid subsets.") + return + + valid_repres_by_subset_id = collections.defaultdict(list) + for subset_id, profile in profiles_per_subset_id.items(): + profile_repre_names = profile["repre_names_lowered"] + + version_entity = version_by_subset_id[subset_id] + version_id = version_entity["_id"] + repres = repres_by_version_id[version_id] + for repre in repres: + repre_name_low = repre["name"].lower() + if repre_name_low in profile_repre_names: + valid_repres_by_subset_id[subset_id].append(repre) + + # DEBUG message + msg = "Valid representations for Asset: `{}`".format( + asset_entity["name"] + ) + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = subsets_by_id[subset_id] + msg += "\n# Subset Name/ID: `{}`/{}".format( + subset["name"], subset_id + ) + for repre in repres: + msg += "\n## Repre name: `{}`".format(repre["name"]) + + self.log.debug(msg) + + containers = self._load_containers( + valid_repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ) + + return { + "asset_entity": asset_entity, + "containers": containers + } + + def _load_containers( + self, repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ): + """Real load by collected data happens here. + + Loading of representations per subset happens here. Each subset can + loads one representation. Loading is tried in specific order. + Representations are tried to load by names defined in configuration. + If subset has representation matching representation name each loader + is tried to load it until any is successful. If none of them was + successful then next representation name is tried. + Subset process loop ends when any representation is loaded or + all matching representations were already tried. + + Args: + repres_by_subset_id (Dict[str, Dict[str, Any]]): Available + representations mapped by their parent (subset) id. + subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents + mapped by their id. + profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles + mapped by subset id. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Objects of loaded containers. + """ + + loaded_containers = [] + + # Get subset id order from build presets. 
+        build_presets = self.build_presets.get("current_context", [])
+        build_presets += self.build_presets.get("linked_assets", [])
+        subset_ids_ordered = []
+        for preset in build_presets:
+            for preset_family in preset["families"]:
+                for id, subset in subsets_by_id.items():
+                    if preset_family not in subset["data"].get("families", []):
+                        continue
+
+                    subset_ids_ordered.append(id)
+
+        # Order representations from subsets.
+        self.log.debug("repres_by_subset_id {}".format(repres_by_subset_id))
+        representations_ordered = []
+        representations = []
+        for id in subset_ids_ordered:
+            for subset_id, repres in repres_by_subset_id.items():
+                if repres in representations:
+                    continue
+
+                if id == subset_id:
+                    representations_ordered.append((subset_id, repres))
+                    representations.append(repres)
+
+        self.log.debug("representations {}".format(representations))
+
+        # Load ordered representations.
+        for subset_id, repres in representations_ordered:
+            subset_name = subsets_by_id[subset_id]["name"]
+
+            profile = profiles_per_subset_id[subset_id]
+            loaders_last_idx = len(profile["loaders"]) - 1
+            repre_names_last_idx = len(profile["repre_names_lowered"]) - 1
+
+            repre_by_low_name = {
+                repre["name"].lower(): repre for repre in repres
+            }
+
+            is_loaded = False
+            for repre_name_idx, profile_repre_name in enumerate(
+                profile["repre_names_lowered"]
+            ):
+                # Break iteration if representation was already loaded
+                if is_loaded:
+                    break
+
+                repre = repre_by_low_name.get(profile_repre_name)
+                if not repre:
+                    continue
+
+                for loader_idx, loader_name in enumerate(profile["loaders"]):
+                    if is_loaded:
+                        break
+
+                    loader = loaders_by_name.get(loader_name)
+                    if not loader:
+                        continue
+                    try:
+                        container = load_container(
+                            loader,
+                            repre["_id"],
+                            name=subset_name
+                        )
+                        loaded_containers.append(container)
+                        is_loaded = True
+
+                    except Exception as exc:
+                        if isinstance(exc, IncompatibleLoaderError):
+                            self.log.info((
+                                "Loader `{}` is not compatible with"
+                                " representation `{}`"
+                            ).format(loader_name, repre["name"]))
+
+                        else:
+                            self.log.error(
+                                "Unexpected error happened during loading",
+                                exc_info=True
+                            )
+
+                        msg = "Loading failed."
+                        if loader_idx < loaders_last_idx:
+                            msg += " Trying next loader."
+                        elif repre_name_idx < repre_names_last_idx:
+                            msg += " Trying next representation."
+                        else:
+                            msg += (
+                                " Loading of subset `{}` was not successful."
+                            ).format(subset_name)
+                        self.log.info(msg)
+
+        return loaded_containers
+
+    def _collect_last_version_repres(self, asset_docs):
+        """Collect subsets, versions and representations for asset_entities.
+
+        Args:
+            asset_docs (List[Dict[str, Any]]): Asset entities for which
+                to find data.
+
+        Returns:
+            Dict[str, Any]: collected entities
+
+        Example output:
+        ```
+        {
+            {Asset ID}: {
+                "asset_entity": <AssetEntity>,
+                "subsets": {
+                    {Subset ID}: {
+                        "subset_entity": <SubsetEntity>,
+                        "version": {
+                            "version_entity": <VersionEntity>,
+                            "repres": [
+                                <RepreEntity1>, <RepreEntity2>, ...
+                            ]
+                        }
+                    },
+                    ...
+                }
+            },
+            ...
+ } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + output = {} + if not asset_docs: + return output + + asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} + + project_name = legacy_io.active_project() + subsets = list(get_subsets( + project_name, asset_ids=asset_docs_by_ids.keys() + )) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + last_version_by_subset_id = get_last_versions( + project_name, subset_entity_by_ids.keys() + ) + last_version_docs_by_id = { + version["_id"]: version + for version in last_version_by_subset_id.values() + } + repre_docs = get_representations( + project_name, version_ids=last_version_docs_by_id.keys() + ) + + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + version_doc = last_version_docs_by_id[version_id] + + subset_id = version_doc["parent"] + subset_doc = subset_entity_by_ids[subset_id] + + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset_doc, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset_doc, + "version": { + "version_entity": version_doc, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre_doc + ) + + return output From 65268fbc09e946aaa623ed178773fa2fa2961ac4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 15:11:33 +0200 Subject: [PATCH 739/785] changed import of 'BuildWorkfile' in code --- openpype/hosts/maya/api/menu.py | 2 +- openpype/hosts/nuke/api/lib.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c3ce8b0227..b7ab529a55 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,9 +6,9 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile from openpype.settings import get_project_settings from openpype.pipeline import legacy_io +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 501ab4ba93..cf659344f0 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -21,7 +21,6 @@ from openpype.client import ( ) from openpype.api import ( Logger, - BuildWorkfile, get_version_from_path, get_current_project_settings, ) @@ -40,6 +39,7 @@ from openpype.pipeline import ( Anatomy, ) from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.workfile import BuildWorkfile from . 
import gizmo_menu diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 0afc56d2f7..c1cd8f771a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -9,7 +9,6 @@ import pyblish.api import openpype from openpype.api import ( Logger, - BuildWorkfile, get_current_project_settings ) from openpype.lib import register_event_callback @@ -22,6 +21,7 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop From 4db98639274917c908c5866c49c477779eb69d96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:00 +0200 Subject: [PATCH 740/785] moved 'get_custom_workfile_template' and 'get_custom_workfile_template_by_string_context' to 'openpype.pipeline.workfile' --- openpype/pipeline/workfile/__init__.py | 6 + openpype/pipeline/workfile/path_resolving.py | 185 +++++++++++++++++-- 2 files changed, 176 insertions(+), 15 deletions(-) diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 3bc125cfc4..0aad29b6f9 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -6,6 +6,9 @@ from .path_resolving import ( get_last_workfile_with_version, get_last_workfile, + + get_custom_workfile_template, + get_custom_workfile_template_by_string_context, ) from .build_workfile import BuildWorkfile @@ -20,5 +23,8 @@ __all__ = ( "get_last_workfile_with_version", "get_last_workfile", + "get_custom_workfile_template", + "get_custom_workfile_template_by_string_context", + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 7362902bcd..6740b710f5 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -3,9 +3,13 @@ import re import copy import platform -from openpype.client import get_asset_by_name +from openpype.client import get_project, get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import filter_profiles, StringTemplate +from openpype.lib import ( + filter_profiles, + Logger, + StringTemplate, +) from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data @@ -189,11 +193,20 @@ def get_last_workfile_with_version( ): """Return last workfile version. + Usign workfile template and it's filling data find most possible last + version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. + Args: - workdir(str): Path to dir where workfiles are stored. - file_template(str): Template of file name. - fill_data(Dict[str, Any]): Data for filling template. - extensions(Iterable[str]): All allowed file extensions of workfile. + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + fill_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. 
Returns: Tuple[Union[str, None], Union[int, None]]: Last workfile with version @@ -203,23 +216,26 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None + dotted_extensions = { + ".{}".format(ext) + for ext in extensions + if not ext.startswith(".") + } # Fast match on extension filenames = [ filename for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions + if os.path.splitext(filename)[1] in dotted_extensions ] # Build template without optionals, version to digits only regex # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." + ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" + # Escape extensions dot for regex + regex_exts = [ + "\\" + ext + for ext in dotted_extensions + ] + ext_expression = "(?:" + "|".join(regex_exts) + ")" # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end file_template = re.sub(r"\.?{ext}", ext_expression, file_template) @@ -306,3 +322,142 @@ def get_last_workfile( return os.path.normpath(os.path.join(workdir, filename)) return filename + + +def get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Custom workfile template can be used as first version of workfiles. + Template is a file on a disk which is set in settings. Expected settings + structure to have this feature enabled is: + project settings + |- + |- workfile_builder + |- create_first_version - a bool which must be set to 'True' + |- custom_templates - profiles based on task name/type which + points to a file which is copied as + first workfile + + It is expected that passed argument are already queried documents of + project and asset as parents of processing task name. + + Args: + project_doc (Dict[str, Any]): Project document from MongoDB. + asset_doc (Dict[str, Any]): Asset document from MongoDB. + task_name (str): Name of task for which templates are filtered. + host_name (str): Name of host. + anatomy (Anatomy): Optionally passed anatomy object for passed project + name. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. Existence of formatted path is not validated. + None: If no profile is matching context. + """ + + log = Logger.get_logger("CustomWorkfileResolve") + + project_name = project_doc["name"] + if project_settings is None: + project_settings = get_project_settings(project_name) + + host_settings = project_settings.get(host_name) + if not host_settings: + log.info("Host \"{}\" doesn't have settings".format(host_name)) + return None + + workfile_builder_settings = host_settings.get("workfile_builder") + if not workfile_builder_settings: + log.info(( + "Seems like old version of settings is used." + " Can't access custom templates in host \"{}\"." + ).format(host_name)) + return + + if not workfile_builder_settings["create_first_version"]: + log.info(( + "Project \"{}\" has turned off to create first workfile for" + " host \"{}\"" + ).format(project_name, host_name)) + return + + # Backwards compatibility + template_profiles = workfile_builder_settings.get("custom_templates") + if not template_profiles: + log.info( + "Custom templates are not filled. Skipping template copy." 
+ ) + return + + if anatomy is None: + anatomy = Anatomy(project_name) + + # get project, asset, task anatomy context data + anatomy_context_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # add root dict + anatomy_context_data["root"] = anatomy.roots + + # get task type for the task in context + current_task_type = anatomy_context_data["task"]["type"] + + # get path from matching profile + matching_item = filter_profiles( + template_profiles, + {"task_types": current_task_type} + ) + # when path is available try to format it in case + # there are some anatomy template strings + if matching_item: + template = matching_item["path"][platform.system().lower()] + return StringTemplate.format_strict_template( + template, anatomy_context_data + ).normalized() + + return None + + +def get_custom_workfile_template_by_string_context( + project_name, + asset_name, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Passed context are string representations of project, asset and task. + Function will query documents of project and asset to be able use + `get_custom_workfile_template` for rest of logic. + + Args: + project_name(str): Project name. + asset_name(str): Asset name. + task_name(str): Task name. + host_name (str): Name of host. + anatomy(Anatomy): Optionally prepared anatomy object for passed + project. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + None: If no profile is matching context. + """ + + project_doc = get_project(project_name) + asset_doc = get_asset_by_name(project_name, asset_name) + + return get_custom_workfile_template( + project_doc, asset_doc, task_name, host_name, anatomy, project_settings + ) From c9289630e01245342a8ff5e7652301643638efc7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:23 +0200 Subject: [PATCH 741/785] moved 'get_custom_workfile_template' as 'get_custom_workfile_template_from_session' into context tools --- openpype/pipeline/context_tools.py | 35 +++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 13185c72b2..5f763cd249 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -22,7 +22,10 @@ from openpype.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names -from .workfile import get_workfile_template_key +from .workfile import ( + get_workfile_template_key, + get_custom_workfile_template_by_string_context, +) from . import ( legacy_io, register_loader_plugin_path, @@ -412,3 +415,33 @@ def get_workdir_from_session(session=None, template_key=None): if path: path = os.path.normpath(path) return path + + +def get_custom_workfile_template_from_session( + session=None, project_settings=None +): + """Filter and fill workfile template profiles by current context. + + Current context is defined by `legacy_io.Session`. That's why this + function should be used only inside host where context is set and stable. + + Args: + session (Union[None, Dict[str, str]]): Session from which are taken + data. + project_settings(Dict[str, Any]): Template profiles from settings. 
+ + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + """ + + if session is None: + session = legacy_io.Session + + return get_custom_workfile_template_by_string_context( + session["AVALON_PROJECT"], + session["AVALON_ASSET"], + session["AVALON_TASK"], + session["AVALON_APP"], + project_settings=project_settings + ) From fbe1a773c016e94569913cbe8837deebea90bcb4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:39 +0200 Subject: [PATCH 742/785] marked functions in avalon context as deprecated --- openpype/lib/avalon_context.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b32c9bce6d..b970cbf4e6 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -528,6 +528,7 @@ def template_data_from_session(session=None): """ from openpype.pipeline.context_tools import get_template_data_from_session + return get_template_data_from_session(session) @@ -908,6 +909,8 @@ def _get_task_context_data_for_anatomy( return data +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_context") def get_custom_workfile_template_by_context( template_profiles, project_doc, asset_doc, task_name, anatomy=None ): @@ -961,6 +964,9 @@ def get_custom_workfile_template_by_context( return None +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_string_context" +) def get_custom_workfile_template_by_string_context( template_profiles, project_name, asset_name, task_name, dbcon=None, anatomy=None @@ -1005,7 +1011,7 @@ def get_custom_workfile_template_by_string_context( ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_custom_workfile_template") def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. @@ -1020,6 +1026,8 @@ def get_custom_workfile_template(template_profiles): context. (Existence of formatted path is not validated.) 
""" + from openpype.pipeline import legacy_io + return get_custom_workfile_template_by_string_context( template_profiles, legacy_io.Session["AVALON_PROJECT"], From 939955339c46c0aa02634546286a5e6217bf2cd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:18:23 +0200 Subject: [PATCH 743/785] use moved functions in code --- openpype/hooks/pre_copy_template_workfile.py | 52 +++++++------------- openpype/hosts/nuke/api/lib.py | 17 +++---- 2 files changed, 26 insertions(+), 43 deletions(-) diff --git a/openpype/hooks/pre_copy_template_workfile.py b/openpype/hooks/pre_copy_template_workfile.py index dffac22ee2..70c549919f 100644 --- a/openpype/hooks/pre_copy_template_workfile.py +++ b/openpype/hooks/pre_copy_template_workfile.py @@ -1,11 +1,11 @@ import os import shutil -from openpype.lib import ( - PreLaunchHook, - get_custom_workfile_template_by_context, +from openpype.lib import PreLaunchHook +from openpype.settings import get_project_settings +from openpype.pipeline.workfile import ( + get_custom_workfile_template, get_custom_workfile_template_by_string_context ) -from openpype.settings import get_project_settings class CopyTemplateWorkfile(PreLaunchHook): @@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook): project_name = self.data["project_name"] asset_name = self.data["asset_name"] task_name = self.data["task_name"] + host_name = self.application.host_name project_settings = get_project_settings(project_name) - host_settings = project_settings[self.application.host_name] - - workfile_builder_settings = host_settings.get("workfile_builder") - if not workfile_builder_settings: - # TODO remove warning when deprecated - self.log.warning(( - "Seems like old version of settings is used." - " Can't access custom templates in host \"{}\"." - ).format(self.application.full_label)) - return - - if not workfile_builder_settings["create_first_version"]: - self.log.info(( - "Project \"{}\" has turned off to create first workfile for" - " application \"{}\"" - ).format(project_name, self.application.full_label)) - return - - # Backwards compatibility - template_profiles = workfile_builder_settings.get("custom_templates") - if not template_profiles: - self.log.info( - "Custom templates are not filled. Skipping template copy." - ) - return project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") if project_doc and asset_doc: self.log.debug("Started filtering of custom template paths.") - template_path = get_custom_workfile_template_by_context( - template_profiles, project_doc, asset_doc, task_name, anatomy + template_path = get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + project_settings ) else: @@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook): "Global data collection probably did not execute." " Using backup solution." 
)) - dbcon = self.data.get("dbcon") template_path = get_custom_workfile_template_by_string_context( - template_profiles, project_name, asset_name, task_name, - dbcon, anatomy + project_name, + asset_name, + task_name, + host_name, + anatomy, + project_settings ) if not template_path: diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index cf659344f0..a5f2631a02 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -38,7 +38,10 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) -from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.context_tools import ( + get_current_project_asset, + get_custom_workfile_template_from_session +) from openpype.pipeline.workfile import BuildWorkfile from . import gizmo_menu @@ -2444,15 +2447,12 @@ def _launch_workfile_app(): def process_workfile_builder(): - from openpype.lib import ( - env_value_to_bool, - get_custom_workfile_template - ) # to avoid looping of the callback, remove it! nuke.removeOnCreate(process_workfile_builder, nodeClass="Root") # get state from settings - workfile_builder = get_current_project_settings()["nuke"].get( + project_settings = get_current_project_settings() + workfile_builder = project_settings["nuke"].get( "workfile_builder", {}) # get all imortant settings @@ -2462,7 +2462,6 @@ def process_workfile_builder(): # get settings createfv_on = workfile_builder.get("create_first_version") or None - custom_templates = workfile_builder.get("custom_templates") or None builder_on = workfile_builder.get("builder_on_start") or None last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") @@ -2470,8 +2469,8 @@ def process_workfile_builder(): # generate first version in file not existing and feature is enabled if createfv_on and not os.path.exists(last_workfile_path): # get custom template path if any - custom_template_path = get_custom_workfile_template( - custom_templates + custom_template_path = get_custom_workfile_template_from_session( + project_settings=project_settings ) # if custom template is defined From 27a62892a02ea1a7f15c4c0bbea13988e80f44d3 Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 9 Aug 2022 16:43:24 +0200 Subject: [PATCH 744/785] Kitsu|Fix: Movie project type fails & first loop children names Fix #3635 --- openpype/modules/kitsu/utils/update_op_with_zou.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 8f5566e8ec..e03cf2b30e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -219,18 +219,23 @@ def update_op_assets( # Add parents for hierarchy item_data["parents"] = [] - while parent_zou_id is not None: - parent_doc = asset_doc_ids[parent_zou_id] + ancestor_id = parent_zou_id + while ancestor_id is not None: + parent_doc = asset_doc_ids[ancestor_id] item_data["parents"].insert(0, parent_doc["name"]) # Get parent entity parent_entity = parent_doc["data"]["zou"] - parent_zou_id = parent_entity.get("parent_id") + ancestor_id = parent_entity.get("parent_id") - if item_type in ["Shot", "Sequence"]: + # Build OpenPype compatible name + if item_type in ["Shot", "Sequence"] and parent_zou_id is not None: # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue item_name = f"{item_data['parents'][-1]}_{item['name']}" + + # Update doc name + 
asset_doc_ids[item["id"]]["name"] = item_name else: item_name = item["name"] From 6ef14510e161f01713150f383b172f8d4239aa07 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:00:23 +0200 Subject: [PATCH 745/785] implemented method to stop timer using web server --- .../modules/timers_manager/timers_manager.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 3453e4bc4c..28702510f6 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -415,6 +415,36 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): return requests.post(rest_api_url, json=data) + @staticmethod + def stop_timer_with_webserver(logger=None): + """Prepared method for calling stop timers on REST api. + + Args: + logger (logging.Logger): Logger used for logging messages. + """ + + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + msg = "Couldn't find webserver url" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + try: + import requests + except Exception: + msg = "Couldn't start timer ('requests' is not available)" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + return requests.post(rest_api_url) + def on_host_install(self, host, host_name, project_name): self.log.debug("Installing task changed callback") register_event_callback("taskChanged", self._on_host_task_change) From 29239178cba6cb3b5e6462771f301b5c104cae75 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:00:41 +0200 Subject: [PATCH 746/785] timers manager is adding plugin paths --- .../modules/timers_manager/timers_manager.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 28702510f6..bfd450ce8c 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,7 +6,8 @@ from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, - ILaunchHookPaths + ILaunchHookPaths, + IPluginPaths ) from openpype.lib.events import register_event_callback @@ -72,7 +73,12 @@ class ExampleTimersManagerConnector: self._timers_manager_module.timer_stopped(self._module.id) -class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): +class TimersManager( + OpenPypeModule, + ITrayService, + ILaunchHookPaths, + IPluginPaths +): """ Handles about Timers. Should be able to start/stop all timers at once. @@ -177,11 +183,21 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): def get_launch_hook_paths(self): """Implementation of `ILaunchHookPaths`.""" + return os.path.join( os.path.dirname(os.path.abspath(__file__)), "launch_hooks" ) + def get_plugin_paths(self): + """Implementation of `IPluginPaths`.""" + + timer_module_dir = os.path.dirname(os.path.abspath(__file__)) + + return { + "publish": [os.path.join(timer_module_dir, "plugins", "publish")] + } + @staticmethod def get_timer_data_for_context( project_name, asset_name, task_name, logger=None @@ -388,6 +404,7 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): logger (logging.Logger): Logger object. 
Using 'print' if not passed. """ + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: msg = "Couldn't find webserver url" From 70bcd6bf9062df6bb72948b02b3344c153f242fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:04:48 +0200 Subject: [PATCH 747/785] moved start and stop plugins into timers manager --- .../plugins/publish/start_timer.py | 39 +++++++++++++++++++ .../plugins/publish/stop_timer.py | 27 +++++++++++++ openpype/plugins/publish/start_timer.py | 14 ------- openpype/plugins/publish/stop_timer.py | 17 -------- 4 files changed, 66 insertions(+), 31 deletions(-) create mode 100644 openpype/modules/timers_manager/plugins/publish/start_timer.py create mode 100644 openpype/modules/timers_manager/plugins/publish/stop_timer.py delete mode 100644 openpype/plugins/publish/start_timer.py delete mode 100644 openpype/plugins/publish/stop_timer.py diff --git a/openpype/modules/timers_manager/plugins/publish/start_timer.py b/openpype/modules/timers_manager/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6408327ca1 --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/start_timer.py @@ -0,0 +1,39 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + +import pyblish.api + +from openpype.pipeline import legacy_io + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + project_name = legacy_io.active_project() + asset_name = legacy_io.Session.get("AVALON_ASSET") + task_name = legacy_io.Session.get("AVALON_TASK") + if not project_name or not asset_name or not task_name: + self.log.info(( + "Current context does not contain all" + " required information to start a timer." 
+ )) + return + timers_manager.start_timer_with_webserver( + project_name, asset_name, task_name, self.log + ) diff --git a/openpype/modules/timers_manager/plugins/publish/stop_timer.py b/openpype/modules/timers_manager/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..a8674ff2ca --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/stop_timer.py @@ -0,0 +1,27 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + + +import pyblish.api + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + timers_manager.stop_timer_with_webserver(self.log) diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py deleted file mode 100644 index 112d92bef0..0000000000 --- a/openpype/plugins/publish/start_timer.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api - -from openpype.lib import change_timer_to_current_context - - -class StartTimer(pyblish.api.ContextPlugin): - label = "Start Timer" - order = pyblish.api.IntegratorOrder + 1 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if modules_settings["timers_manager"]["disregard_publishing"]: - change_timer_to_current_context() diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py deleted file mode 100644 index 414e43a3c4..0000000000 --- a/openpype/plugins/publish/stop_timer.py +++ /dev/null @@ -1,17 +0,0 @@ -import os -import requests - -import pyblish.api - - -class StopTimer(pyblish.api.ContextPlugin): - label = "Stop Timer" - order = pyblish.api.ExtractorOrder - 0.49 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if modules_settings["timers_manager"]["disregard_publishing"]: - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - requests.post(rest_api_url) From 51f58340617a225d872f7a99aea8e75b514a0f87 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:05:17 +0200 Subject: [PATCH 748/785] changed order of collect modules manager --- openpype/plugins/publish/collect_modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index 2f6cb1ef0e..d76096bcd9 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder - 0.45 + order = pyblish.api.CollectorOrder - 0.5 label = "OpenPype Modules" def process(self, context): From e35fd6e476dd3fb1cab539b1e39aaa1704ef62b5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:19:01 +0200 Subject: [PATCH 749/785] use constant to define timer module dir --- openpype/modules/timers_manager/timers_manager.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git 
a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index bfd450ce8c..93332ace4f 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -13,6 +13,8 @@ from openpype.lib.events import register_event_callback from .exceptions import InvalidContextError +TIMER_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + class ExampleTimersManagerConnector: """Timers manager can handle timers of multiple modules/addons. @@ -34,6 +36,7 @@ class ExampleTimersManagerConnector: } ``` """ + # Not needed at all def __init__(self, module): # Store timer manager module to be able call it's methods when needed @@ -185,17 +188,15 @@ class TimersManager( """Implementation of `ILaunchHookPaths`.""" return os.path.join( - os.path.dirname(os.path.abspath(__file__)), + TIMER_MODULE_DIR, "launch_hooks" ) def get_plugin_paths(self): """Implementation of `IPluginPaths`.""" - timer_module_dir = os.path.dirname(os.path.abspath(__file__)) - return { - "publish": [os.path.join(timer_module_dir, "plugins", "publish")] + "publish": [os.path.join(TIMER_MODULE_DIR, "plugins", "publish")] } @staticmethod From 77d78aadf979632938cae81f94468f919490cdc8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:19:38 +0200 Subject: [PATCH 750/785] mark 'change_timer_to_current_context' in 'openpype.lib' as deprecated --- openpype/lib/avalon_context.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 42854f39d6..eb98ec1d9c 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1515,13 +1515,21 @@ def get_creator_by_name(creator_name, case_sensitive=False): return None -@with_pipeline_io +@deprecated def change_timer_to_current_context(): """Called after context change to change timers. + Deprecated: + This method is specific for TimersManager module so please use the + functionality from there. 
Function will be removed after release + version 3.14.* + TODO: - use TimersManager's static method instead of reimplementing it here """ + + from openpype.pipeline import legacy_io + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: log.warning("Couldn't find webserver url") From 1c133cf6b126cf4f4a0277ddd455c75455dc93b1 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 9 Aug 2022 17:46:58 +0100 Subject: [PATCH 751/785] FIx to use project name instead of code in update for ReferenceLoader --- openpype/hosts/maya/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 2b0c6131b4..8c3f6f071a 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -235,7 +235,7 @@ class ReferenceLoader(Loader): path = self.prepare_root_value(path, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(path, loadReference=reference_node, type=file_type, From 4bb98863bd5476794faeb28fb37b9c77cc837dfe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:47:28 +0200 Subject: [PATCH 752/785] add all keys from anatomy data to representation context even if it's already there --- openpype/plugins/publish/integrate_hero_version.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 735b7e50fa..7d698ff98d 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -313,13 +313,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): } repre_context = template_filled.used_values for key in self.db_representation_context_keys: - if ( - key in repre_context or - key not in anatomy_data - ): - continue - - repre_context[key] = anatomy_data[key] + value = anatomy_data.get(key) + if value is not None: + repre_context[key] = value # Prepare new repre repre = copy.deepcopy(repre_info["representation"]) From eb0e014beaac279ef019fa13c8213c3ff2196754 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 9 Aug 2022 18:35:32 +0100 Subject: [PATCH 753/785] Fix call to load file in case of fbx file --- openpype/hosts/maya/api/plugin.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 8c3f6f071a..652874997c 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -236,10 +236,16 @@ class ReferenceLoader(Loader): representation["context"] ["project"] ["name"]) + + params = { + "loadReference": reference_node, + "returnNewNodes": True + } + if file_type != "fbx": + params["type"] = file_type + content = cmds.file(path, - loadReference=reference_node, - type=file_type, - returnNewNodes=True) + **params) except RuntimeError as exc: # When changing a reference to a file that has load errors the # command will raise an error even if the file is still loaded From 6c10d4412320867ff40422196b562db2ca128ca5 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 10 Aug 2022 03:43:25 +0000 Subject: [PATCH 754/785] [Automated] Bump version --- CHANGELOG.md | 3 +-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3124201758..b7ef795f0a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ## 
[3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) **🆕 New features** @@ -89,7 +89,6 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) diff --git a/openpype/version.py b/openpype/version.py index d2eb3a8ab6..c41e69d00d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.0" +__version__ = "3.13.1-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 03922a8e67..994c83d369 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.0" # OpenPype +version = "3.13.1-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 4d477592492407e806e636175b72dd06ed7a42c1 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 10 Aug 2022 11:29:46 +0100 Subject: [PATCH 755/785] Fixed with the right type parameter for FBX --- openpype/hosts/maya/api/plugin.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 652874997c..e50ebfccad 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -209,7 +209,7 @@ class ReferenceLoader(Loader): "ma": "mayaAscii", "mb": "mayaBinary", "abc": "Alembic", - "fbx": "fbx" + "fbx": "FBX" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation @@ -236,16 +236,10 @@ class ReferenceLoader(Loader): representation["context"] ["project"] ["name"]) - - params = { - "loadReference": reference_node, - "returnNewNodes": True - } - if file_type != "fbx": - params["type"] = file_type - content = cmds.file(path, - **params) + loadReference=reference_node, + type=file_type, + returnNewNodes=True) except RuntimeError as exc: # When changing a reference to a file that has load errors the # command will raise an error even if the file is still loaded From f03e63502e80dc7d3a8717db54e22132d0276bdc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 13:59:26 +0200 Subject: [PATCH 756/785] fixed dotted extensions --- openpype/pipeline/workfile/path_resolving.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 6740b710f5..aa75d29372 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -216,11 +216,13 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None - dotted_extensions = { - ".{}".format(ext) - for ext in extensions - if not ext.startswith(".") - } + + 
dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = ".{}".format(ext) + dotted_extensions.add(ext) + # Fast match on extension filenames = [ filename From 8858377019184f17ddf00b8bd7d3a1e8f06f0e8e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 14:32:07 +0200 Subject: [PATCH 757/785] formatting changes --- openpype/pipeline/workfile/path_resolving.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index aa75d29372..ed1d1d793e 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -216,7 +216,6 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None - dotted_extensions = set() for ext in extensions: if not ext.startswith("."): @@ -227,7 +226,7 @@ def get_last_workfile_with_version( filenames = [ filename for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in dotted_extensions + if os.path.splitext(filename)[-1] in dotted_extensions ] # Build template without optionals, version to digits only regex From 0528494d9e53368275754befa73bea7dcf7948dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 16:10:52 +0200 Subject: [PATCH 758/785] extract review can scale to match pixel ratio --- openpype/plugins/publish/extract_review.py | 63 ++++++++-------------- 1 file changed, 22 insertions(+), 41 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 7442d3aacb..e16f324e0a 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1210,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin): # Get instance data pixel_aspect = temp_data["pixel_aspect"] - if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " @@ -1230,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # - settings value can't have None but has value of 0 output_width = output_def.get("width") or output_width or None output_height = output_def.get("height") or output_height or None + # Force to use input resolution if output resolution was not defined + # in settings. Resolution from instance is not used when + # 'use_input_res' is set to 'True'. 
+ use_input_res = False # Overscal color overscan_color_value = "black" @@ -1241,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin): ) self.log.debug("Overscan color: `{}`".format(overscan_color_value)) + # Scale input to have proper pixel aspect ratio + # - scale width by the pixel aspect ratio + scale_pixel_aspect = output_def.get("scale_pixel_aspect", True) + if scale_pixel_aspect and pixel_aspect != 1: + # Change input width after pixel aspect + input_width = int(input_width * pixel_aspect) + use_input_res = True + filters.append(( + "scale={}x{}:flags=lanczos".format(input_width, input_height) + )) + # Convert overscan value video filters overscan_crop = output_def.get("overscan_crop") overscan = OverscanCrop( @@ -1251,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # resolution by it's values if overscan_crop_filters: filters.extend(overscan_crop_filters) + # Change input resolution after overscan crop input_width = overscan.width() input_height = overscan.height() - # Use output resolution as inputs after cropping to skip usage of - # instance data resolution - if output_width is None or output_height is None: - output_width = input_width - output_height = input_height + use_input_res = True # Make sure input width and height is not an odd number input_width_is_odd = bool(input_width % 2 != 0) @@ -1283,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("input_width: `{}`".format(input_width)) self.log.debug("input_height: `{}`".format(input_height)) - # Use instance resolution if output definition has not set it. - if output_width is None or output_height is None: + # Use instance resolution if output definition has not set it + # - use instance resolution only if there were not scale changes + # that may massivelly affect output 'use_input_res' + if not use_input_res and output_width is None or output_height is None: output_width = temp_data["resolution_width"] output_height = temp_data["resolution_height"] @@ -1326,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin): output_width == input_width and output_height == input_height and not letter_box_enabled - and pixel_aspect == 1 ): self.log.debug( "Output resolution is same as input's" @@ -1336,39 +1348,8 @@ class ExtractReview(pyblish.api.InstancePlugin): new_repre["resolutionHeight"] = input_height return filters - # defining image ratios - input_res_ratio = ( - (float(input_width) * pixel_aspect) / input_height - ) - output_res_ratio = float(output_width) / float(output_height) - self.log.debug("input_res_ratio: `{}`".format(input_res_ratio)) - self.log.debug("output_res_ratio: `{}`".format(output_res_ratio)) - - # Round ratios to 2 decimal places for comparing - input_res_ratio = round(input_res_ratio, 2) - output_res_ratio = round(output_res_ratio, 2) - - # get scale factor - scale_factor_by_width = ( - float(output_width) / (input_width * pixel_aspect) - ) - scale_factor_by_height = ( - float(output_height) / input_height - ) - - self.log.debug( - "scale_factor_by_with: `{}`".format(scale_factor_by_width) - ) - self.log.debug( - "scale_factor_by_height: `{}`".format(scale_factor_by_height) - ) - # scaling none square pixels and 1920 width - if ( - input_height != output_height - or input_width != output_width - or pixel_aspect != 1 - ): + if input_height != output_height or input_width != output_width: filters.extend([ ( "scale={}x{}" From 3d62093224be2b3786823b175f1bfd1ffa3aad3d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 16:14:50 +0200 
Subject: [PATCH 759/785] Refactor moved usage of CreateRender settings --- openpype/hosts/maya/api/lib_rendersettings.py | 3 +-- .../hosts/maya/plugins/publish/validate_render_image_rule.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 9aea55a03b..7cd2193086 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -60,8 +60,7 @@ class RenderSettings(object): try: aov_separator = self._aov_chars[( self._project_settings["maya"] - ["create"] - ["CreateRender"] + ["RenderSettings"] ["aov_separator"] )] except KeyError: diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 642ca9e25d..0abcf2f12a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -41,6 +41,5 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('create') \ - .get('CreateRender') \ + .get('RenderSettings') \ .get('default_render_image_folder') From 7a16cb723b8329d493697c153881533808a2c0e2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 16:42:34 +0200 Subject: [PATCH 760/785] added settings for rescaling when pixel aspect ratio is not 1 --- openpype/settings/defaults/project_settings/global.json | 1 + .../projects_schema/schemas/schema_global_publish.json | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index e509db2791..0ff9363ba7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -85,6 +85,7 @@ ], "width": 0, "height": 0, + "scale_pixel_aspect": true, "bg_color": [ 0, 0, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index b9d0b7daba..e1aa230b49 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -319,6 +319,15 @@ "minimum": 0, "maximum": 100000 }, + { + "type": "label", + "label": "Rescale input when it's pixel aspect ratio is not 1. Usefull for anamorph reviews." + }, + { + "key": "scale_pixel_aspect", + "label": "Scale pixel aspect", + "type": "boolean" + }, { "type": "label", "label": "Background color is used only when input have transparency and Alpha is higher than 0." 
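The two commits above add pixel-aspect compensation to review extraction: ExtractReview now widens the input by the pixel aspect ratio before any other scaling, and the new "scale_pixel_aspect" setting toggles that behaviour. A minimal standalone sketch of the same idea follows, using a hypothetical helper name that is not part of OpenPype:

def pixel_aspect_scale_filter(input_width, input_height, pixel_aspect,
                              enabled=True):
    """Build ffmpeg scale filters compensating a non-square pixel aspect.

    Mirrors the logic added to ExtractReview: the width is multiplied by
    the pixel aspect ratio so anamorphic sources preview with square
    pixels; a pixel aspect of 1 (or a disabled toggle) adds no filter.
    """
    filters = []
    if enabled and pixel_aspect != 1:
        scaled_width = int(input_width * pixel_aspect)
        filters.append(
            "scale={}x{}:flags=lanczos".format(scaled_width, input_height)
        )
    return filters

For example, pixel_aspect_scale_filter(1280, 720, 1.5) returns ["scale=1920x720:flags=lanczos"], which matches the filter string the plugin appends for a source with a 1.5 pixel aspect ratio.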
From 74a91f4d22ebcacbab07f05ca44fd8e1dbf1d6c2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 17:01:42 +0200 Subject: [PATCH 761/785] Fix more missing refactors --- openpype/hosts/maya/plugins/publish/collect_render.py | 3 +-- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e6fc8a01e5..085403bdf7 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -203,8 +203,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index f253ceb21a..13dfc0183a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -413,8 +413,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) comment = context.data.get("comment", "") From bbf113cac4c8dd0dde7cca18646641107a505b44 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 11:54:07 +0200 Subject: [PATCH 762/785] Set default value for default render image folder to "renders" --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..ce9cd4d606 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -33,7 +33,7 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "", + "default_render_image_folder": "renders", "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { From f0a6a6414ea86178f0d02ed83d8816919a86beb1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 11:54:35 +0200 Subject: [PATCH 763/785] Tweak ValidateRenderImageRule docstring and invalidation error message --- .../publish/validate_render_image_rule.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 0abcf2f12a..a9be996e0c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -11,7 +11,11 @@ def get_file_rule(rule): class ValidateRenderImageRule(pyblish.api.InstancePlugin): - """Validates "images" file rule is set to "renders/" + """Validates Maya Workpace "images" file rule matches project settings. 
+ + This validates against the configured default render image folder: + Studio Settings > Project > Maya > + Render Settings > Default render image folder. """ @@ -23,11 +27,13 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def process(self, instance): - default_render_file = self.get_default_render_image_folder(instance) + required_images_rule = self.get_default_render_image_folder(instance) + current_images_rule = get_file_rule("images") - assert get_file_rule("images") == default_render_file, ( - "Workspace's `images` file rule must be set to: {}".format( - default_render_file + assert current_images_rule == required_images_rule, ( + "Invalid workspace `images` file rule value: '{}'. " + "Must be set to: '{}'".format( + current_images_rule, required_images_rule ) ) From 8fe20486a91a8943b847b610d342df163dee3e1b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 12:51:01 +0200 Subject: [PATCH 764/785] Remove usage of mel eval and pymel --- .../plugins/publish/validate_render_image_rule.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index a9be996e0c..b94bdb0b14 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,15 +1,9 @@ -import maya.mel as mel -import pymel.core as pm +from maya import cmds import pyblish.api import openpype.api -def get_file_rule(rule): - """Workaround for a bug in python with cmds.workspace""" - return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) - - class ValidateRenderImageRule(pyblish.api.InstancePlugin): """Validates Maya Workpace "images" file rule matches project settings. @@ -28,7 +22,7 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def process(self, instance): required_images_rule = self.get_default_render_image_folder(instance) - current_images_rule = get_file_rule("images") + current_images_rule = cmds.workspace(fileRuleEntry="images") assert current_images_rule == required_images_rule, ( "Invalid workspace `images` file rule value: '{}'. " @@ -40,8 +34,8 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): default = cls.get_default_render_image_folder(instance) - pm.workspace.fileRules["images"] = default - pm.system.Workspace.save() + cmds.workspace(fileRule=("images", default)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): From 7cd47ff6c4641d7e78c8d3e9823f4d58fdea1135 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 12:54:33 +0200 Subject: [PATCH 765/785] Only update and save the workspace once This avoids saving it many times on repair in scenes with many renderlayers and thus many renderlayer instances since repair runs per instance. 
--- .../maya/plugins/publish/validate_render_image_rule.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index b94bdb0b14..4d3796e429 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -33,9 +33,13 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): - default = cls.get_default_render_image_folder(instance) - cmds.workspace(fileRule=("images", default)) - cmds.workspace(saveWorkspace=True) + + required_images_rule = cls.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") + + if current_images_rule != required_images_rule: + cmds.workspace(fileRule=("images", required_images_rule)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): From 7f48af4bdcf524ae91447038b658a60aa256f80e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 00:11:52 +0800 Subject: [PATCH 766/785] Collect full_exp_files instead of leaving it empty --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e6fc8a01e5..26ad0818e0 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -219,6 +219,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): full_paths.append(full_path) publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths + full_exp_files = [aov_dict] frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) From 782a393a20ba61538fcacd181d0f1a7f47bc798b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 00:16:41 +0800 Subject: [PATCH 767/785] Collect full_exp_files instead of leaving it empty --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 26ad0818e0..e132cffe53 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -199,7 +199,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): ) # append full path - full_exp_files = [] aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ From d9e3815878b3868b18478b9dea2328d140bf2d92 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 12:23:06 +0200 Subject: [PATCH 768/785] Refactored content of help, eg error message --- openpype/plugins/publish/help/validate_containers.xml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml index e540c3c7a9..8424ee919c 100644 --- a/openpype/plugins/publish/help/validate_containers.xml +++ b/openpype/plugins/publish/help/validate_containers.xml @@ -3,9 +3,9 @@ Not up-to-date assets -## Obsolete containers found +## Outdated containers found -Scene contains one or more obsolete loaded containers, eg. items loaded into scene by Loader. +Scene contains one or more outdated loaded containers, eg. 
versions of items loaded into scene by Loader are not latest. ### How to repair? @@ -17,8 +17,7 @@ Use 'Scene Inventory' and update all highlighted old container to latest OR ### __Detailed Info__ (optional) -This validator protects you from rendering obsolete content, someone modified some referenced asset in this scene, eg. - by skipping this you would ignore changes to that asset. +This validates whether you're working with the latest versions of published content loaded into your scene. This protects you from using outdated versions of an asset. \ No newline at end of file From ec157e0a2a3a04aa18caf3135846ff3ad29486aa Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 18:34:12 +0800 Subject: [PATCH 769/785] fix the bug of failing to extract look when UDIM format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 80d82a4f58..bf7f5bc757 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -436,6 +436,16 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" else: + + # if the files are unresolved from `source` + # assume color space from the first file of + # the resource + first_file = next(iter(resource.get("files", [])), None) + if not first_file: + # No files for this resource? Can this happen? Should this error? + continue + + filepath = os.path.normpath(first_file) # if the files are unresolved if files_metadata[filepath]["color_space"] == "Raw": # set color space to raw if we linearized it From 82c4f19979ea7055cb742c3321a0bcd9b2d5a73d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 18:36:05 +0800 Subject: [PATCH 770/785] fix the bug of failing to extract look when UDIM format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index bf7f5bc757..8e09a564d0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -436,15 +436,12 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" else: - # if the files are unresolved from `source` # assume color space from the first file of # the resource first_file = next(iter(resource.get("files", [])), None) if not first_file: - # No files for this resource? Can this happen? Should this error? 
continue - filepath = os.path.normpath(first_file) # if the files are unresolved if files_metadata[filepath]["color_space"] == "Raw": From 7526d4cfa5252b646469c79db782b1b4a04373ae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 13:34:37 +0200 Subject: [PATCH 771/785] Update openpype/plugins/publish/help/validate_containers.xml Co-authored-by: Roy Nieterau --- openpype/plugins/publish/help/validate_containers.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml index 8424ee919c..5d18bb4c19 100644 --- a/openpype/plugins/publish/help/validate_containers.xml +++ b/openpype/plugins/publish/help/validate_containers.xml @@ -5,7 +5,7 @@ ## Outdated containers found -Scene contains one or more outdated loaded containers, eg. versions of items loaded into scene by Loader are not latest. +Scene contains one or more outdated loaded containers, eg. versions loaded into scene by Loader are not latest. ### How to repair? From 2cf01d8605e2588ce437579b55d409cf2027b452 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 13:44:13 +0200 Subject: [PATCH 772/785] Fix Scene Inventory select actions --- openpype/tools/sceneinventory/view.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 63d181b2d6..e0e43aaba7 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -551,16 +551,16 @@ class SceneInventoryView(QtWidgets.QTreeView): "toggle": selection_model.Toggle, }[options.get("mode", "select")] - for item in iter_model_rows(model, 0): - item = item.data(InventoryModel.ItemRole) + for index in iter_model_rows(model, 0): + item = index.data(InventoryModel.ItemRole) if item.get("isGroupNode"): continue name = item.get("objectName") if name in object_names: - self.scrollTo(item) # Ensure item is visible + self.scrollTo(index) # Ensure item is visible flags = select_mode | selection_model.Rows - selection_model.select(item, flags) + selection_model.select(index, flags) object_names.remove(name) From dc73bbdb13044d077b5576cd33ebc7b51597a70c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:49:34 +0800 Subject: [PATCH 773/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- .../maya/plugins/publish/extract_look.py | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 8e09a564d0..991f44c74f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -430,22 +430,25 @@ class ExtractLook(openpype.api.Extractor): color_space = "Raw" else: # get all the resolved files in Maya File Path Editor - src = files_metadata.get(source) - if src: - if files_metadata[source]["color_space"] == "Raw": + metadata = files_metadata.get(source) + if metadata: + metadata = files_metadata[source] + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" else: # if the files are unresolved from `source` # assume color space from the first file of # the resource - first_file = next(iter(resource.get("files", [])), None) - if not first_file: - continue - filepath = os.path.normpath(first_file) - # if the files are unresolved - if 
files_metadata[filepath]["color_space"] == "Raw": - # set color space to raw if we linearized it + metadata = files_metadata.get(source) + if not metadata: + first_file = next(iter(resource.get("files", [])), None) + if not first_file: + continue + first_filepath = os.path.normpath(first_file) + metadata = files_metadata[first_filepath] + if metadata["color_space"] == "Raw": + # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space From fc65721838a90111c9137b45f062d1f51ad06c08 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:52:47 +0800 Subject: [PATCH 774/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 991f44c74f..02957bb0ad 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -442,7 +442,8 @@ class ExtractLook(openpype.api.Extractor): # the resource metadata = files_metadata.get(source) if not metadata: - first_file = next(iter(resource.get("files", [])), None) + first_file = next(iter(resource.get( + "files", [])), None) if not first_file: continue first_filepath = os.path.normpath(first_file) From f5578cf664321d4c2488c2ac46dbb893f8822cf0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:57:18 +0800 Subject: [PATCH 775/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 02957bb0ad..68d80de5b8 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files in Maya File Path Editor + # get all the resolved files metadata = files_metadata.get(source) if metadata: metadata = files_metadata[source] From ffea3e85fee6058fd3fc38982d228c51f463645c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:34:30 +0800 Subject: [PATCH 776/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- .../maya/plugins/publish/extract_look.py | 25 +++++++------------ 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 68d80de5b8..5ece5e2e1b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -431,24 +431,17 @@ class ExtractLook(openpype.api.Extractor): else: # get all the resolved files metadata = files_metadata.get(source) - if metadata: - metadata = files_metadata[source] - if metadata["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" - else: - # if the files are unresolved from `source` - # assume color space from the first file of - # the resource - metadata = files_metadata.get(source) - if not metadata: - first_file = next(iter(resource.get( - "files", [])), None) - if not first_file: - continue + # if the 
files are unresolved from `source` + # assume color space from the first file of + # the resource + if not metadata: + first_file = next(iter(resource.get( + "files", [])), None) + if not first_file: + continue first_filepath = os.path.normpath(first_file) metadata = files_metadata[first_filepath] - if metadata["color_space"] == "Raw": + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination From 9b01e6e0326b4750c043da207adc2b8495a8ebce Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:36:40 +0800 Subject: [PATCH 777/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 5ece5e2e1b..63a695cecf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -442,8 +442,8 @@ class ExtractLook(openpype.api.Extractor): first_filepath = os.path.normpath(first_file) metadata = files_metadata[first_filepath] if metadata["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" + # set color space to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From a9cee020b5f2044af533c06323c697162821624f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:38:45 +0800 Subject: [PATCH 778/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 63a695cecf..95f319a924 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -431,7 +431,7 @@ class ExtractLook(openpype.api.Extractor): else: # get all the resolved files metadata = files_metadata.get(source) - # if the files are unresolved from `source` + # if the files are unresolved from `source` # assume color space from the first file of # the resource if not metadata: From 85575e3a99f5618304fc41f5e73a117fe66abc0b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:40:40 +0800 Subject: [PATCH 779/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 95f319a924..c9e41503da 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files + # get all resolved files metadata = files_metadata.get(source) # if the files are unresolved from `source` # assume color space from the first file of From f9f275f6a0555c5e1250b6f2b19aa606ce2fb6e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:47:08 +0800 Subject: [PATCH 780/785] fix the bug of failing to extract look when UDIM 
formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index c9e41503da..93bfa8c913 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,6 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all resolved files metadata = files_metadata.get(source) # if the files are unresolved from `source` # assume color space from the first file of From cd64ffb8f8a85b30edb4e7c01fb2d90d33bd77ba Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:51:45 +0800 Subject: [PATCH 781/785] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 93bfa8c913..8be0c7aae5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,9 +429,10 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: + # get the resolved files metadata = files_metadata.get(source) - # if the files are unresolved from `source` - # assume color space from the first file of + # if the files are unresolved from `source` + # assume color space from the first file of # the resource if not metadata: first_file = next(iter(resource.get( From fd56f09c8423ea6438d6606c69dfa6c45ba9e8eb Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 13 Aug 2022 03:48:01 +0000 Subject: [PATCH 782/785] [Automated] Bump version --- CHANGELOG.md | 25 +++++++++++++++++++------ openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 21 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b7ef795f0a..2adb4ac154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [3.13.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) + +**🐛 Bug fixes** + +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) + +**Merged pull requests:** + +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) + ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) @@ -37,6 +55,7 @@ - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Maya: fix Review image plane attribute 
[\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** @@ -68,13 +87,9 @@ - Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) -- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) -- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) -- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) **🐛 Bug fixes** -- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) @@ -89,8 +104,6 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) -- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** diff --git a/openpype/version.py b/openpype/version.py index c41e69d00d..6ff5dfb7b5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.1-nightly.1" +__version__ = "3.13.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 994c83d369..9cbdc295ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.1-nightly.1" # OpenPype +version = "3.13.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From e6584a9b940782bb6927e807b6a19412a1fd2fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:08:20 +0200 Subject: [PATCH 783/785] removed pype 2 compatibility --- .../custom/plugins/GlobalJobPreLoad.py | 48 ------------------- 1 file changed, 48 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 172649c951..cd36e45921 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -260,52 +260,6 @@ def pype_command_line(executable, arguments, workingDirectory): return executable, arguments, workingDirectory -def pype(deadlinePlugin): - """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars. - - `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path - originates on platform from which this job was published. 
To be able to - publish on different platform, this path needs to be remapped. - - `PYPE_PYTHON_EXE` can be used to specify custom location of python - interpreter to use for Pype. This is remappeda also if present even - though it probably doesn't make much sense. - - Arguments: - deadlinePlugin: Deadline job plugin passed by Deadline - - """ - print(">>> Getting job ...") - job = deadlinePlugin.GetJob() - # PYPE should be here, not OPENPYPE - backward compatibility!! - pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE") - pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE") - print(">>> Having backward compatible env vars {}/{}".format(pype_metadata, - pype_python)) - # test if it is pype publish job. - if pype_metadata: - pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata) - if platform.system().lower() == "linux": - pype_metadata = pype_metadata.replace("\\", "/") - - print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata)) - job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_METADATA_FILE", pype_metadata) - - if pype_python: - pype_python = RepositoryUtils.CheckPathMapping(pype_python) - if platform.system().lower() == "linux": - pype_python = pype_python.replace("\\", "/") - - print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python)) - job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_PYTHON_EXE", pype_python) - - deadlinePlugin.ModifyCommandLineCallback += pype_command_line - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") @@ -329,5 +283,3 @@ def __main__(deadlinePlugin): inject_render_job_id(deadlinePlugin) elif openpype_render_job == '1' or openpype_remote_job == '1': inject_openpype_environment(deadlinePlugin) - else: - pype(deadlinePlugin) # backward compatibility with Pype2 From 919a6146c6c16d5f98caff3eb79792e876b2de49 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:08:32 +0200 Subject: [PATCH 784/785] removed unused function --- .../custom/plugins/GlobalJobPreLoad.py | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index cd36e45921..98c727f618 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -234,32 +234,6 @@ def inject_render_job_id(deadlinePlugin): print(">>> Injection end.") -def pype_command_line(executable, arguments, workingDirectory): - """Remap paths in comand line argument string. - - Using Deadline rempper it will remap all path found in command-line. 
- - Args: - executable (str): path to executable - arguments (str): arguments passed to executable - workingDirectory (str): working directory path - - Returns: - Tuple(executable, arguments, workingDirectory) - - """ - print("-" * 40) - print("executable: {}".format(executable)) - print("arguments: {}".format(arguments)) - print("workingDirectory: {}".format(workingDirectory)) - print("-" * 40) - print("Remapping arguments ...") - arguments = RepositoryUtils.CheckPathMapping(arguments) - print("* {}".format(arguments)) - print("-" * 40) - return executable, arguments, workingDirectory - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") From 963b66eb5808249dd47ae5e6bd62a53972352655 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:15:35 +0200 Subject: [PATCH 785/785] fixed python 2 compatibility --- .../custom/plugins/GlobalJobPreLoad.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 98c727f618..61b95cf06d 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -34,7 +34,7 @@ def get_openpype_version_from_path(path, build=True): # if only builds are requested if build and not os.path.isfile(exe): # noqa: E501 - print(f" ! path is not a build: {path}") + print(" ! path is not a build: {}".format(path)) return None version = {} @@ -70,11 +70,12 @@ def inject_openpype_environment(deadlinePlugin): # lets go over all available and find compatible build. requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print((">>> Scanning for compatible requested " - f"version {requested_version}")) + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: - print(f"--- Looking for OpenPype at: {install_dir}") + print("--- Looking for OpenPype at: {}".format(install_dir)) sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -83,18 +84,20 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue - print(f" - found: {version} - {subdir}") + print(" - found: {} - {}".format(version, subdir)) openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) if openpype_versions: # if looking for requested compatible version, # add the implicitly specified to the list too. 
- print(f"Looking for OpenPype at: {os.path.dirname(exe)}") + print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) version = get_openpype_version_from_path( os.path.dirname(exe)) if version: - print(f" - found: {version} - {os.path.dirname(exe)}") + print(" - found: {} - {}".format( + version, os.path.dirname(exe) + )) openpype_versions.append((version, os.path.dirname(exe))) if requested_version: @@ -106,8 +109,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest available version found is " - f"{openpype_versions[-1][0]}")) + print(( + "*** Latest available version found is {}" + ).format(openpype_versions[-1][0])) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -127,8 +131,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest compatible version found is " - f"{compatible_versions[-1][0]}")) + print(( + "*** Latest compatible version found is {}" + ).format(compatible_versions[-1][0])) # create list of executables for different platform and let # Deadline decide. exe_list = [

zB7pt5_rwgXtX>YdMQ+XMIiaNac5pZDrhJ{YzFK^+OH&osbn#rk{BFy6IEDcCUDCy- z3K}@>VKoZ64#p@NnR`VjwY1~rOC4&k(MAW`ZrzNQDZl<1P0g}&0mdR~RiaKClj*F*xYLbA|vrtBbM z^XUA#RyJGrp&sap5U^c4A0yEKmj4h)MdDX5R{hsp29KEo0JP1DmB{wpcNW2>>eNqv3n%Ke3fPZ)k*y$BW8U!X2d2=`?+FCku3hMA3bWmom<=AkcG zi$8YM560#(ThIJm*f)p*Z+(e|M-<-lSyJ)W0=j4&(I9qYzFJ zz9!aD`3kWhl-;LlLd{_q<5O@wa6jff{%hb!!ipY@B+X-?0(&>;X*CKwT-R-41{{=Q zbLFuiJoTPMSn?;3y@lBY zR+ztijT*on8oeS;oXukl?jQCu;Y?3(Qcn5&$HtXg{DzHRz$i6;W6%{PV6RTsD-9js zZwdGWWHV3&p_S#G&L6lYoI;zIdY@PDs~0N7%HD667R9^`5~wCQmSdo-qjch1~KIF#gXdtOzzGGZ`v zK)0f54n$yxYutpKCa7=%o#u$kjJ7ZHx=u!oz*Ghz^=YK@dnM6kLH0y5UUvTGEZ znByqyYC#?GCupfL;oQKU24h~~&cii?16H+wmN%Mv3=vd}VKs{BccXhSkrjcxM+0wO zHV83~ZK+$xJkw2NeOaIQ4vZ!A=hz|H`N3AP2Os8eEgtIz5AOi$C#~kqM*<6|NRsVR z_8f{dP;-7@a%!1w{oY7GwAT{i@@B(_$h|*uLNj4{5Yl?ZjoM{%y>YO9Mj@%3HJP4u zQ~k3PH;T3mB);3m0Gx9r@*>ISYN`0+ZgAUMh9aOlJiMd&_%_y&|g+5H|NAb zcD+j99c;W+{HeHeOHADmk>Zryc;YIcN7UDK$IH!pBA0B!f$mE9FIxnzI zI8-scjU&$48~(&Zys9ZYA*GyESD-!=8PU!G@KVnmq3t_%Me0gNGNa$vT;d(VC&Fq% z^2qn!q+RDTZ61+ObtrFLM9&>@N}+bAqsC{2EP%iqEEvbBZ&|2c0S%WP3fS`=ee&d{ zckozgvUO%b$<;{a_vaYhdlNyTee%$5_1dwwjGZVNTO&vhRC({W-C0XJOnaI|HYo`! z?0O`5_2!KW5g~G$J3C;a%SSrDLZPccr{9VOh;*hmSppeS$()^!^OAGKD`byU7i(zA)Pdz?+{ohh%ZA#sp~gKjmjC`NmPSHs6VO z{^D|EuS{iK+)2ejt+}~O&YT<_~0a38e>I1vNWt{7r8na6IBET&}gu+ zH+Q|+N9t1n-f~u&E-jTYq-VwaSxc?0zXaE3dpBIF8U4PjAvO$Y_#1E=_2Y^)?3eeC z0J>ri1R461qp=o+%2Q!AJXOSVe?e`RPwy!bMp>a3$iY!@7ZFkDTSx49^WdUHSYH+~ zf*DL!I^t6Y-s&!0nDa;h9HB*uCYXd#ooWZ!2Pc^(t_3tmZ9fOIdloLPmlpP@@+ChR zVfde)6e#4Dv--?A6Nh#7{W<|bMoh&2Z?>!f^67Wn^5sCu+%s`oN#qgIj@{-HqX}h^ zM{V*X<)WJ_<)7o~4EnrdgEeLxyxI_ZqhyN>{HAa5$@(D|yz_8Lm6p5FSW|7k6?NIc zf^8Pm@iIB=o-KQU#?sLt$t3!0n8d6lv^#tj0>}bU1~C|2pi|JbQg;9AXGJZo-w-67 zgFb~*H`WV!{R}-}FqFwB3;6U~CMvXs;~Wd8k$94vqGkVAgEmFcOs3mHvA+rT%G1 zg}PO!=XQC>i=UGC;E~<)Lq|4C@YMdA5I4Drh+w(RsJrN zu96?c-|nK8I_ZHb{*iv4g>&Aql`mlK>8dp2p`t?9!z)Rx>|;_jr$MK`Ub-0F8TL?a z*-0DoP-X){J&klj=Rv4Bx=0@7GDOM#qX#>Up$=KXYRj@)@kz-lO%?iY1V39zt*F>d z&qJ05>St-txjjqLu+8J;nOfTM5#{yZsK|#A&zbFqg`z7mZYUBN0T{jwGt5h-wR)#& zR>hm&0iMH^v3#I%mDxlIC{|kM!OH#wKS-!0_J@`Opn?r?cT}OCgG3~PW)Mu_~ofnxrtVIcz z-1u_uk2zJq>4yQzhr74nKP8m4sJ)nSmcDkV^LUi%`tDj+>4Gi|Yc0K$g6H!0O#H~& z7ewS-uuK!ZbL)C-yp4*Tfl&R$BZ@69-#BVd6vtH56UV`WijGx5v!7%Y{IW09hGMO0b@r1k8L1RD$R+ z0_RcutTld>kMF`Qig_!=i+d6CvDBmZY;rdAlwr0tTec?LszGkZd0iHwl=~z2@X8Bv zB3D$ls3QRpa=&kEm`qzl;FsTZ=Hpl8_7rP`oC2_dUmbI>+j{nX62t@Mm}!crWC4QY}16-1tjPc z_J!qzfO5b9=V@T`_Hwn0-w&B>w;Hsv8a0xfPk|KDRe@KRCJ&wa(R^(v@Y+V_uP*+T z397tE*Mzi4?AfXvmL=q<8T>P_Blq>o@qx06=`veX1>4OgJGo!JM}(8j;Kr6i5QzTU zjl7;CkleQ$KkGXuy~Z0_)K(YO4RqjXb8l!XtgCG`sB~WoxI&$_0zvG-r}~*I(ru~Q zu7sgUd|;&f{M!~S7!>$SMqu}-a*HfGC7j)G*BjGeKg9+BpX?wq;LJTE=4_}>*iKtv z(mxr+KGqk$Dxay`5Wv5Z0+lQt#rGB}a<3B$)XO5nL=Pweoo08F0I6^%V$zH3EeeFA z0xUSQPkU$p)co;Vj#m7ADErgMmx||#8vm%s4bV~zgriaBiS|fO(F)9L6N| z-Y(I#ucKZa?mvUyKRl^IP(_vArf;h=;yfFbD0eg2h0hi7G&V}Aa^nS9>MS8E6b6-O zFO&EJP>Jh8(G7Qg3kWTHQ336o2wB8Vy~vI%4ATtY_h!+)$R;p}Ewi=REaY%K%!Ko; zOU-`I&N_kuM&M9;%imeM0m9p##A=7k1&uP7{Me<_$jp2BbzB ztVZhB>;Y6s)s5N*D2Tk*zeAh}0MK0AP6iufGBmE1VM#>=jVZa=XZ~t_6lwuKKydjS zrAeXyaCVBH2hKZPzu}PCo6*?o#h%tSSY>PPxuC-e6#r?tsD@mF0ZwA8u26knZRjh% zcu)rHJDiHZQxc@JpCcRDJcr!wI+>vYzNf>&@l6gG^_EdF>-^u*p1(m_9n-j#JLqQ$7Ztgfp9U?6rva2Ym>LB%dEj z(drMju>rlQWc&~_;EyY+RX2B@|KqnrD0{6`OJ&`$^3Og?UfWbTz7i>l z{b_DOfxDV;jW&)3a2qp^1ePzyeK^9omfw~i`eQ3T!nwy37^^R!)f#?NV{nC4Y zQXsX2SiyMq{PBLPpyihYFlh<0;p-IaBI;q(83uK3rzq09wc%+8)atJ;?!`70 z_MKEc{}u+2EEaUmnK&k+mK4ASl$F+k<4=C{RT`+0Dr6CUW4CH6PW77v^>?Wic?PP} zN@G=_Pd}ys+Ho55kAoQmJ!^_+Hh`Pu;Dx;KmH9>2GWDWK)nfnw=>!}`9EQK!KGQbJ z-D&*mn}Gdc0PoM&Uxd3X1!J18{x(K**$Pw67P!LHIipQJ0j0kb1(hgPEH{U%D;s9i 
zJ1PM&`B=EF6qb=fPs2o^7l!I*-zjuyC5>BOFm9`UZNMXt*+1UZUlsYgc0^k3q91Xk z$Ne2rn*;&$NokVKc5B{D{R#`scW3Htk;U};gQBEKJ!rlG5H@?dm^OBsa#ijc-m(hQ zt?ZfKVjvPp98)EkO}qSYC4y@j5yyXQ|F6CfxS`SjdtgW` zRDY}0i(T!B1^h!bxZgZzGU58qipEpf5nmvJ%;v?m21{!3?TdN{1;YorPxC0uVbjd9XTC-fm{39s~X z*vrhvmI8-ai;u)Pg)L&T3~N^F?+Uaho&@bTr*P5DG9;^leFOtj_(b3Pl9g(k)0?v6 zJ_s-Lp2nw+d;|a*L~DVwL3>cOZ*wT<2EcpX9AAIxWl+MJJ)3bxfHFOCSlmxb2nGc_ zWK*U=a1feUVXD>>W)Tb5Tf#6lE(}*q8fN~EhgJXr{{Hl~IySGW=;%a-RY7AaN(t_P zP}5ETXveJ0GOyUit=nQ;kt8&zFSq6)7CIOyIGM^!^C6&^+^Oy^#X}eSf9=ljR>%jd zMC~qaxy4$jII=9w;QGf_K7fw_a5jblr#3)xQ{BaRNtPT>y6qnz4NKtUMV-R=Tka;4 z6MNIaThABw{J#8Zozd1C@(TI3F&E{|Jh&DI^(Y+9l9g%jpl1QD1WPG7R?uh%Tg=eb z30B~m!UbwIQJ?3eK3&HD@YR@+aA;0E2)pXLwlxV^(!D1^jPGx*slGB5PJwqn9O22=fIiM?5zx(n@VXNqABq~JwPlBO0Wbb1(c*w&oNNP( zY5uu|h+0h5PF|h$g(T`bZEuI7PeL_ntsp^N8+*w0N2r9Yt%lk-6DbI%OpIJ@o0G;! z#^^ra?Ya1as#HjYCs_5mLs-I$sP{8%BY03cuj1M2Er`VlE)3)8@cp(?C}k>a}B?8_A9xti?F=fqNYahJPhQ? zxGhCFG+X>zGXI%O7{@^Kq|190<08kcDSkS=b+w(pZ7BeeuX{tGC2b45-BsXa2QM_9 z0d~bzeCN}KnFwQ$LiCaGy3w;jAkbs8B(2#D2fejf@`vpx^x3q=6T|?fZ%gmiRN_zx zG}}?h>;RzitHYe%jy+Z}{l)ug*BbOXPGC46&}}oq7afy)0NLLwZT*`ejmraT)w+`) z&Xm!&`!xUzV+mOrt7NQ|vtrNcT>+Mwoi%mqOk@qKQNFA65UAV=$fEzx*Oe1K?uY;P zJ*em0|66xC<4=MARUK~s2f)t%{p>}<|L?E%x&8NN(Lm>i{}qQee*yYmDQGvqmHzkF zpp(-7_b>nNUH;!D0xqEa-|4w>;{VLX|KIH8p41iHnR07cOv!JL05P`wr`*u(+5ZD- CA{hw) diff --git a/website/static/img/methodmadness.png b/website/static/img/methodmadness.png new file mode 100644 index 0000000000000000000000000000000000000000..9dd0681d4a029b11c8db74154934333db42e1851 GIT binary patch literal 8650 zcmV;*AvNBKP){sswtzQ z2U2NM4~+Sg*|~Ye`Aiip)KkkqST(e%;An`F_Hjp<9dA5#tyZM#*c>nLm(6-8;0H&euoC~pkzcGP^pvo? zoQ_m7qyF5-eu*oZo(W297jMHLz!*LhCK$m_KfMge+>Zw@-Lj%YeosDsnia`aW!6|j zXPGz&Ox&VoqqQdw`<~P1^sBmCRo&n3{OX2#^#{ayk5 zqx)A5sd8s`6#f07INsOPNYyR?{ylJYe&uP+vcCj2G*y2X2uuaJO3_cZSTh0c1pa(H z{Ua=quf{r# z5n#eFm}W#)0Ox{UsP3ID1bTo>VV}SYM&vQnu8qQQ4aU4U83ZISOW<1Iy(5&$5Dp5m z8+0qGPnAl+^VYIyBqmo!IMNNAT>YgmB~yJ4_&$)1G{`6ldr&R~t^?`GFYmKy8jk#U zqr*UGVFAd=qn93pei5D+Z~r26087Wve-WM$dPXlcJbd5xP&<>tHv)EySjnxDCxcW8 zClS@sc*fBPU0q#4mZeKo^}RU8`4*tQ9}k!ba-j$p0(%jDj_S9&y1E`1uVhjWBT@nu zF!+5nMosoQB=EEmxwE4b+}2;NY@)&3uU~QMu~gAdMxnnhPSR(GD-=bM2liCplQ6!? z(Z-t(U0q#)+Uyi@(#A(=eh$n4J_x$ZX6f|qD7vF*>1OiKofGy63X8(Pza^=pk5ETE zqY+LBoF!uZpgW4bJj_EGwRVvR9|q2!pusnT1Hk4{%EZ97@pmPCIB`?!ZsZ)4xlN4g z6=47P$8vNbiXzWu+3}#CQ`Ju~c$F#RR>WN>eFy@~!mR?+@-iWCj;j8Jb?x0YiF%I( z_Az))ctAHWwJ;{!IOXFa%q<1}7t59OH^qiVBOHKP3Rh*SPtibRi$Z@IqE~@@Vk}QG z4s1mE(kNvz6&XL_h}nkf*HEqJq*D+s0Y2WuxDH{=3*#QkQ3g>vTj9gN$EXoa2D}RV z8st}~K9^mV40$yM|BJE;3N$<6?jUO>kHRHU=||Bfu4rb z$rk7LIz;3`kWY1n;a~R^3@XhK4h$o*<<_pZ;3m%D7SVRbNpkz)=qQfky}*6-*P|$! ziR%2ih3K%2>qsMtqLQjT3r957@1FrL0XLD^TjDgWF|I5Fcq)pbAEJ6T@SlKp;W)mh zuoCpsK@hATr}c3-WD2h|`aV$;Dq zmv={D_yEEusI8{66+IjH1&mHO+ki)L4y0)~Idqi>|3w{9ttgb;4m^potQ`)*IRW7> zM=YTnMC2aeTZdshM+(P_kuRc@hq*r$=$hzGjWIo_E-C1D7TAF?E&0uoBze6Q25S&G z1Ot&7Q$1srA1T7gu?yufWPos{ipYQid^Ez@jnyAvBmh2`m!GQeb_QG54gh!Iy!405 zBs~}Sz=$P;Js=}H2;cme3Y;@Icxv*Ys-1$)FI-9lUh5xSM5{*z>;-)l1}0{R$Px7i z8eQ=2Wh6v|(a`#7VL3NWLH`OUi9)nGfYf)~(5l|by0}xe%n+H{-K;M|!5;7Qe z#XKUgkmz8)n`q&An=wZxz!U0Baeu~OClNRi?bbL+9*IifJH|cY=26Buvqr)js2)5V zgE@kTf!Bb_F!wuv*);EZXbvJ}(Bi}j6{hF?j2NEP0AU{PRfQcx8xxHH1_kvdQkZW| zz0Ce{d3RI_egtv`HIWUfBAS?+Cu+1UU^B@78>LLYs{PF`e=t^})fdLV%1uM!#;>EaJ{+lDv@6m?sagy9j;iDFjO93Baf(fU;QaMlx zgY|;!qb7wME5c>K|E1X#Lv=Cel?)dBcX*z6#G))5G27FDa|@RbfDAB%ru{f^99KoQ zuPEFO+y;CrPLf9^Y{YH6nUEQgEx_Z2Yx4x&R|@=j=Z= zjWOFZXTR`pMtD7O~)>SPb*cNM-LykxWNMpe5M$hXWh73VZRLxpFo>QlJ|F2rpq zI1{9PquT-CUZZ@M$ums{z-dG0jdlne2P|c9Bt$j#?zVE0tS;7gGZW5nivGJM%Kel! 
z;Y=EOdwct%D0&FwA3*<8{d`C4C+yf}%&^%RzaE+2A7hIo)O zV2dC(tH{@>Bjz_V;k0p-Adcg`9i`F_6wzEiIE|tHzD6hmTY&o&xuKktpPbZzHiznF zw`YJmjPHN5x3_nQ)wrH;Qe04H6|L^XOM%`(8xyjV&swXlelkQnob7#U0O=yT)k ze*~0)XMt}Pu1%)HH3QiQTKq;E8IgE~e3R`WvVAfJS9MwVzQ5jxtTMuRh%Co#mo6z( z#ChPisInSsA02Mb&O|^&_5;6Av>89+A(Rb>ywG1RKf@?`)?p3}Y~yzjUKoZ`JkRUy z@9&RsoV^{>*hU*|w9!TzZM4xw8*Q}FMjLIk(MB6>w9!TzZM4xw8*Q|4w4fb1aFih` z1;+sAf=ma>pc~_IWyCO^s1z>19obdIZZi8=oTf)QWkwrq93Dia;Jm05JQI~djY{Fc zs1)AP(b2JRpmi{OV zd#I&xs~Ff_Oy4#d@?=vpQFS>nMfyG8-!W0ygkAe~9%wa-@SeC_d4>tx(byG5oxr>% zil(R@>@AlMOl1Ek6{O*g#bZFu>5igl4RWz+K8EV|EFm(zm2RUbDhbRq?#SF0hD&N; ze>}dVx2EL#UB;N%R&|DmM2PyR98{6lMP%nB@#rHtwuzUOTp zDUt7BnOQgi`1AZ%ufju76n(SlL90Cm_#gurwRE%1vYT=Hx5p3H{zq|VaREGC3WC2X zS1Rj=uhbf>>N4Punkbrp?ux>2lL+fQF)vL-#$;99DagGDOBkG#*Hbt?Q@t#TqT7q# zM4(?`4^U0XJ{OS-fg#zFN9cIA!iOxhDh}=g?gj2?u^xa4!(f^bS%Gqf!l@#%2%!fg zLf9$ji>R)N!f*}7yf_gA6h)DzY8M0V6Z9g4g+LEThHwyMOQ!aTs1!b#S$(!qTE%*0 zvzR&~1D2z9BTXbT&5sSe0Qfw$yg5&6mi=su&o>mRixByJkm`yv)flb<{^x{sfgT4m zb9#=0upf9NQ@t|^!~4hkam$r-dq)(09c6~XXK?%VZT`1Z(LWMZFABroRQpnN2gn1| z&h?&ya7if$Zf}~YsLlW`Q{i1L4tX2O4q7CfD2hC_*>Sit%&tMGKQ0Q~p&#!7_9EPe z+Ivc2uvw7T4Qy>i8X5$(3k3cU_$&kGrxje9!ubmC2i6;r?{r4tclzRFbFqnHng)E| zI|0Yh!RbHKfX*NcA5YVC*zKph)dzhhzGs%;IK)|n%Lb?@x-p%g z>|G7sLG`HLr0|#GSX9xt(t#6mmV$0evurbNl^6@u_X8`)d|aZg40uvRZoqx)Q*hk) z-N1$JFg@uLcb4;XoXFPQ3R7u7I5vOQ>Nx?!{p|%FO|xwEkVc0=XBb8*ya)Iz3dx#q z2W0HTWdeL1ml|^y?(`V}rXn1JOR0VlaJC>>M-V*UEaUV%Z?V9qfw$z(co4^swgbCy z-#Vg3$lbtvgjdrn8&El#@e9mH^%~GMG!WSeB3dyY=!7CS3M>IGY;b##qVL6JgPV+; zD!YKY8GKrgCoo0bsgEnD=^`O;4)DiO=x-^ewr>>%%8FL=dWJaTz&6k?;U;V~&H3>z z-JV{l%EP#{;vK+Bg?ZL-^RXO^-z%_2;ptJz?N`-SpR-F`CgPl7gIBo zRkKdtQPpFcA)IRd`OijS_)47AWWB=4?$ z3Yq=8I8E0TFGf)mqV_~ouSDch+y{*qoZ9}&V>TG5?E+a?5WRb>>Vk4Pxit*@zZPSx zIFX}zWY$#N8P3n)j*MCfbY`l}SO^ zB>82dI$d2|9X3m!Q`PGkeAwZ9b?QMggkun9;7%9?ctx%Lb6lwm5@*9O{Jsd+iA$vB zy2|j(>?UGNDF-(U6}Z zwl<2Q^{76YuLRY6Dzg->%2b~MzE6uQVQ%$Vf)huo=n&P}KxRCD9csUd6UmCJaEQ~l zX6C;=QLR_Fn}sk*kqU7fC#4{G0^wgpw>dPfbva_QU-mY-6*2M8`skK?POe-&DE^tkjrjG+Z z>*y$TtLi&Yu4R%$G#JR(byD4Vzq@z48tcad>i332B+NXF8t1D{v^`_2%D(+ zgE~c+$JkO6j*4=nvb_`pHv$!e4*`o5J_o!PVYR6J7IorxT&}zX{G~CbR0@Krs(K18 z8+20#EH=oL-G#{fMrBj0&#UTxOl!K+ptLrl5l$JWpjDT~WvKm3M=98ZGF9MOt%*<7 z1gK#X?KTg!3xUh15oZM_dw(ByI({{c{E|%V#lRio?6Er8sv^*+&cwcQ`Hd(H9~NPq z!s+?tF^O!sK6eU3GivWw*fh{*lI$;q!5Wkex%I6P4x+lHnSL>5yQtm@x+-T#Jqj0; z!r%&&tsBotZ4KbQkZ96XG z)*>7+1c)31G8-onTtkgWb_45hR+8$Z8<$lr*P+e=uA`QUZT!im5tgfoEC6+OlCyv2d<&GS7k@`K<&BWv<#gtI}depf_h zP_r;};mo0@=Kq6R!hV7BGlUDMi3;N)9M70zM7SC#rioZD%GXrPE^)H%`T6w_WHv$& z`reCUC7v|P_IRFID~}e+gl>z&jDn$F00q=FiyW8}n+K z*)cgj1K*#Cn=gx~=`Im2H_l3&Mf4paTn=1}upBf9eAAz1c0UEiP)|5Z8N7<@66A*n z8}bgj3-yXh=SGyjN7w>fO--!o16iA9*;8YAP~Vsnfj<TJI%0|$+I5D;p_r9L3WZ9Q)i><|L5MQd96_;32GjHLaVNf#=Nt-bT$tTg4&ZXNdX7W>%Xb{2(|4syH7}Gl$%J zz!VVx&dQWPB!WqbZ@>a!>m;o780t!bA1YP61h-(QSlURAwvhalgMlVphQ=6T-Ke1(5Y z-K`hZ>etFiGMW!&2+}moe9v^c5EyA>{s*UL?!h@L4+4>h*isbTQK?jhSgBe;;QLd= zF~f@(8ol3vTMc{DG+RxJqG6>{Nj=}+3akSu=VR~386Z+z##a({~gDEu?*xMh+2Rg&>Wfi`sunI@fAuh+`M}WVj zMeB@+pmoFz9Ea)`v|)__%vI%D!5-?4qJJ8rr7gob8mpLZRez7BVzO-Ha$OSVnZ81t z=+^`^Kh1wYh3!|>#u=E6*@N0gaH33cd-Z-%9dl+gRr&;ZwZVxLE)kUS8-=IEiI5>8 z3l&YQwLOe2htU8GqzW5=ThuX+*}yVtPUZ!uUR4T$ACJXvS4~9{MbTPOjs;E!U98A7 zLFcJsma}jS`8XVnbc394MBhc8(#5ZDGz`P11%4>Pp8&pde#*yvWBx2t{qLYVaYQv0 zM?htDMg;njWPe8~ycZ`cUYoZ+M})UVVfc+WNp`jJkQUu9$OdXrzu-K)8*rRq4#O|G z3sm(FGG}>d*_VRaX}HjiDyAc>%=;QvTyrVNc{j)xkb&@^K43>n{WpO)jt`ZB;6a32 zMdUh~6Rk#th!`CAEn+|LOOV^KLlQw%MfZ$xqBoBB0^b3?XN;NVd)|pgct0Ya#C>ak zdE%nKnAuNp?kH za39DuII*o8I7?Mei_7JELeG0nL{1fB7T|~{6OjS&rN5kPE``CjMC1Y-fq4oGj1%)( 
zil9}MT?mhh3x5DN0Ur?MtEw6xtU%;6oC)}NhEANNihiOL1ixvSJE_h?r09V3aK?~M z8WWBwg~2S8%W%=vRii}(cphu-YqIC67*u64dqiYdJ*8Z!yjlvw@8OIIRYKjXWAwe` z=J2@;W*ToO{21gb<#NMxgM)k=qrjBHV7`c)4U|#(5vBosR_%r)NjF#9wASuPtbMo? zhOsE;Ap;KK4r-RMreL#lA#e>kAOi0~_2<+PUvF=3Kk!UcD(wf3&x4l%y7JgTps~Lt zQ)H7U54spA0OmS7JNG16wjUTtR?if90j;fC#rmy0tTARcY9FWOl-r8(-ToxGH!pH` z6o#{nk&CSAdvJ>U6x<|ViX-~dMCARnASw>ybd7PVMsI`iB~^R8xKQ*@P)i^i15c~! zcgvMZQ%+EYogm*9WEOzHUL!IR(Pw@TthHJ8B~^V}er>xI+0#sa4Ms2>XKbkIsc)!q zyH)+)l}a+^*t-HeoKAKFa1dc1Fh^Z1t|aU3TdrErhB$vOqt zhP6ea&Eh240o>;M{+&kTBqOo_bQvOFz!An@n>SECMGdS1%dO_~= zeIMWVyG2BRE^D*7s69#G3Y@OdgHu}f01pX#J5DM;ZT6YV<;r&8#%9aI0GE6?{~I%$ z-^`wfGyj)xTL$iQL{oXz-5_@Q%b1;gcD0ilsAP9PJ z(E(S&p!u^8)s3w@o2vGLY!?|IoOuXWN2TCJU%d5IJvfS@5^9fA^m345IMV?sIG`I6 zPPts}Err2W5vh96g`rCNNXQiS3%UnnCeBFEq;gzC9LzgOUe{$X-PrfGTzSQC}P ztstjTvr2giQ+(f_+6>|3tIgRAj=zh6?N&ANJo9PPmQ=+z95LxwXuHf8Nm)dk`{XfgiI04v0Wqvx`<7DAo|&n;avik7Q&*sfw$+;)qOaNUME9$qV5HLjxqOFFAN`3ro4ghJWgJJ zrATcfp}@JWphmGfKpKXN*CPzWDFSDRa52iIIC;1ac&biJYcOWJ5El+o4UVk7=(~D* zdmEc{>rl5}qlylkw|O{N9AkJ8)f?laa#u5b9aT`B19Fku?>~SJkdbzTdJ4y4?U}Sd zIK6Saw=)WVBrs3m-%yK6f!v|mpjrjDUE=GAxw+ZC3dk^Kt=pEM*WnCTRb$d{eY$-@ z$`3Ntn=6%gz$Z8?;i$*~ftPXn*}sR8`-dx=qqs_3jpJNS)>rPrHhuO$7s5F>?y~@A z23?KvtkME8h{bI@dj|MnD=%7v9<9mVBf;XFXS;!yfkzbiemO}RS)Go0h)8`j z$fTk#PPT+$_!SX$h|{S~#;xY{bFvJ)3anGrABpj9i{p4q@oI4#@9hl3+f>-APAWMA z=l`m=#%4Gjb-gHetF<>(>gL`^QOpNGp8`5P-~UE$Z|_KF4wTf|MZg6pmkX>?C+6)0 z9?ev5t?zg^BuTQfD~i6Zu+gfX0h*!wB2JRWo2b4ID0_f0Ia`w?*%?LAKcn^mR4>gVQ|6%r2!b2S zs~}Hc?Ju2WvNB4mBcm@#_D50l6IES@>czQlW+9GglydXF%fMKhpw)!4r}e)tWudG&}~)sRkCaw cqkP-{4^rkC_?BJZ%m4rY07*qoM6N<$f+RXRJpcdz literal 0 HcmV?d00001 diff --git a/website/static/img/noghost.png b/website/static/img/noghost.png new file mode 100644 index 0000000000000000000000000000000000000000..febaedcae8212926ae8d7eb3705c8304531f8375 GIT binary patch literal 22435 zcmb5Wc{tSX_dh-hCbARC8fD48YiyM*p~XIS%Bbuy)`^)myV7D`3R%aNNU}^dQW-?{ zEYrx+P_`5jzx(O+{(S%cbzODUW#)dK`@YY4oX2^b^DN=~Su+l{Lu@b@jKlo2u{8|F zJO}>%cOMJ*llDUS1`GzjamC2!e2BH#DG75EBQ4eAT3U)~N~$oJY#J`@r3LaNf1Keo zPhSK!P~T%ZP^4k`bC8vW-sDpS?Osv9INjjn{I!y~8}lHgw~$>;96)l$NjEe`mGV#MA@HJZCE8iV9KIS8TYFHy@6^jO^I7JfJvW({;j8^LaV# za{T?@!Dd;afqG$8{c}4FV^7PToWe9L*H(NlS#DbXR#86MqBu^Heq-^b^i6=Pk}TeL zAq~+!BBH#U(H$jALzxxT^6qw|xs#-SJ*;iXO|?}k+8OVRNs!OZXFcO582DC#EM&kh zE%f@FVJrKG@w+JHiD!kzG3N|ovk!c*^B!_E^2yR+wHzP7AI%aC$7e{_?x9FcNo%L+6#k)ihC^ zsar!RMN-_F^~)wHft$m8WjQxXe9JJhzZQK8rla>&bvd}c{wWsvVe?K$AThybHSFb& zBj8e4gHB^YU@#>q=r=s#sbLrlCIK@yK4}|~yEqZqVux$pT^WWm@duH4M97%WALTkl ztn05;{|fu8AgrPJ$o%0+9)1&J>8{oEr}p;g>DlF@k004@b)Bh6FPXhZC+XZsZZ|Xt zS`q0bwhp^Rw-1L`{aITX;R<~5tz{(6^T*Owb*~`x|JN@CJAVcN{fl`B#ezVlZ-?Oe zw~oWcQ7%=fm(yyvr(}Hd9EkhXx1&+8=(%yPM!h8BD4E1rqy6v#HgDe4<>oKuEyYOO z@7O=gQHh7DUbeC5+03=qqg9@2eYG4Tl8bV`^0lq#LN6?jb{#{LRp?Pj5=J1fcJXGn4cVW}F|Z4tAh{t4 zibO^9cF4uYakX8)!eBf0bM5N!S6vpo*LiExfpzB)!WB}-F3Z!N0P}*$x1^tE4lZz{ zE3WD>Z>KipUq5@$PGtcRhlqoVGueNh&CL2&U_$02Taj^;g0E{io6Je8o*^nhI$b*+ zN@pC@f3YYbQqRX_W*s>1?sXwF>FDmJI>mLt5HZXbd)%PHy#D??^P;IsU`w2L`{b5f z5x6Xp#ug$+IT9|y;lj3^)s+A5;Bu|B)m1(((Qhgq4)pKF>h*=+P5nM?eY`*HN5gt4 z(&xyzE$xr5@!+Fr(-d*)5?`1so_7S**O6?$71C$q8q>N4}T_) zk`H{-*)k0-y@sMWP~JbNFPnU`ZLPlIZi&TW{E*2<4c?F6?zO1>9mTzW`C|M+iUVPa zuZYv&mf&nRm1-I2>qVL`Na&XSdnF{>%Z{c;mm@0E*SXX!rnbo5l*q!k+4Y-^*Meg% z(vH!THy(?{=kRt`N>Qo2FVt7~`+E~iqARQQ1k;#%Sf!pC%kiVv`GrnV@eRL+5bq#c&lKt|Gt>J<&B<5CM11cz5N+b?Ou67b zyr^WbYSz9)Qz0Wi|3a8@57&$HzZmc`^9pOLY3(K@ zj>W6@R7<~=t-&hvESYaVaT&bnwlIjytWH%?mp{BTY3?o@{HdpaFvUBa6m>cYf(Iub zQJOa8^AnQ>W!3P}qNCj;Mk-T4Q4jN0i{M=QX^~6M!g>q?MnzAgmTqTfjqYO5e^wDr 
z(fgV;OLUhH(hTU2^IRK_W5F*q{e+N5C49rDO`gdu^U*)psjDZ+#8(%m@@PjY+J?$- zUT>KDEG3cmqvr=SN z@+>_yfXubL8HvTZ9?x+sc7I|jIOl-;De>-^?f zydl(9ZjxrEkY=d*)`UBB7;>1rrqr<$%MDZebCoulvDk79bJHc0-6B(gIprf^uP|)} zDEx`NuM2aFUydp_I##Bw8oMM;_M{5{&#t$vGV{~HJneA*gILvV|3oD%hM^4FT6Rwk z8J;)fV(;Bp8e46E3)*y+w_&mAAC$;126zsHsHncdB5I#Cb%2{y!0EyqJ~gl3Y$YO{ ze>G64AH8@#6;?DQnJZs=Z>%oJ1WXEFr#VuK4B-P{hB0tq&C?sSh(SxrVoAcTNT_X(2`&Mf(UZ88G$wL+ero zpEg?V;{D*;2X!!_Uf7|>9eY2hLf{K#MTW-2N9tJ(tFthVG@k3UvrKkWeG+au4Zi*z z`V@L`AO4K!1w;Yz=WFd>-6WA}dsJrDpr z)w%&3e_AxH?;PRFAP*Lc)8Q(pyl>}D|9(?GeH-9}w+c_hS?FftLI{sSc2spIep~?` z`;J;1UfIQf!@om0E&Mr)HPpfEk_NwH8HmlwK4~&5Bc92hb^CyhDcD1L{V<^p0xGm0 zEH``l?y&I2=~Tmfu!+_MtdC;S+8l4JP)<)YQMpn|ty!?ia_@Qd1iL{V)=|+Wz7ZGb zAdY14X-jldtyL2<`3mA%S8UR-*vR8$)4LZ`4Mfyb*L#0)mq3;(p5Izs` zB^!I@LVVMB*(6>Z;B=nSf1?0 zw1;lJYDJ&jgVxPP6_wo>}PO+q=`gUh4JuhQA@W$Ws6II31Ir z#A#z%ECDre_@E0yJ5IqZ_WO?E;ul733)&jrts~yS-Tjt3SLpa=H|ZUG z1WUvLAj{_l|_C)=J%@WB0btU>(b?B zmixV->0NA1iq7-v4j$TGA`{Ks`<}nqIue9XX(x(bJaJZW2;Yk|l-VI|C@$Lz?!IbB zFxl!Rwi1dGa*kt%4rxyPYRTEKn5v<-PU4;go(edzyYpysJ@m&^Vwm)S$NDwHr)Z{L zaqrOwr|$NUEM1;&U;QwAepm0`)o$N&e&uas#rbxbn9V`P@b7`Z^YG@R?I_i^(N6r8 zu-%ipV(I@HysM_}g$p?le(JfmOKGvPe_VD8eToX&r<(rr2Vz?Utg`?+6ygq#;fzt- z#mUqvk9JzunCTt-dj=k(6tih}dswc;$e}%W7!}5#x2w077e_mU=&Z<=VARJg6$(Yz z@i`;M?`0e(+fg3K=OCuu>Q+=r%;Y?YWLPb}*^CFCWx1$GW5?ZkG&VstAWxDz$b83l zM)t?I_ZphxXA4Xf`kh~DM(HniKkSpQ?4Bpw}?IzI9(RtWi?yu|mM>VT}~i;qcjS2fSWppH22`Rq7O9 zOF3%zGmH}9sP3DZzqHNes@Nkwd4t=R>e!$6Pb;lS-FK)=kZ=yEi+sV_wlkHGM-i71 zgFjy#WgoE@Jv_jIkJ~u!gT;2SzfQ_Zzk8fgQ~C#H$b3uFheeSNmI%s&gw^qGMQslF@IdB*G5Dr98Fvo4!?UV2Emp=tx$aA7<4D z3H`_(n%}?!ir=TCfy3#qDh#Z&@^#{Ss8qCewxg_S7hTD#=z~7vUU;SzwaD;*qijGO zUh|s%LdT|;qp_Z5jL-h|SudIbjUo_ZRsZq3UvNx&x`U&=KiRThPuxQ_xUY0_#qaxZ zt_>ztuA0jNFsXz(8+$Qb-nbIG59Q=h2nugR3+T9udYW$;tUTXRr>9oq59nmirci5# zzTDAyrIcj0%3en`MYeNa|6xJ&HFp0(`h|ARovNKVFcj~aaF8gsr5?O-4^wrPsi$~D z8`p#CIdA3|Fh=-OF6A^Ygb#X=g{dk*?%JC9Hm;udjl$L&Ar zv=@wh@-3+Nw+95vjToVds`mZ;8S7EMy=hN|4pmju6cAxKWwXd6v zX4QEZ%EfIUype)_b7<&2Zn7q%vNo#4*z%0*{Rcz`kTJabgM(vWF%R|aTH{TA@^^RK zMLN&9Ng2GS+Q(_oIA{+d@HUK(ZI`brEg#0+!oc;D#AjZNZ7VNZ;cgx1KSev;Z;<1` zqZ->L`zl;9$F$m~J;(X>;Pe@q9nKh~44?krBia^|xV--ad${9_O%1^_zPz$Rh*_I` zqlxdWzx`+W^#-1)zM5){a&6w2+Y1A5*A14S zcJv#P0{YAGz$-cC$ug(=xVLp`71#Jz7G5fpAkKm7t$(J}6KJ`3Yt<$Mo3}=98Js@q zaRdE^&6{&(%yWy6b{2QbduV=r@zG0L$0|42PFd>wn^Ru@-chg7^44bR6@zY|C5!$N zcQ2SZdgfZ;AIl}ZERGJU@9-?o6D`bt=0=HI)EqdBW&V8&O}-z(x_k~ChiMw;HELIo z-cNbEe+diiBqh}3$MO#QMM3`PHF{tI>pnnuQulLIS{zddoa2sNDVbNGYeZBwqGacK zlEq=+l~M{1#K>ce_}p$-n5|jnf$=H^%%TUOxApc-7ypt(r{1`g%V($TLs{v|A~R9X zUe510h@QX6Ly^`{_dU_s5otK&#`!OSd5gc*axt^Cz?!F2!Rz$nmn&%Y}wWhjJ-WzdehRomu8lngGooEaajTzWfmVM#0-HSGndV<^2tQMW#CyQkJtUxD)$x;Vl3I zKce@K*ou0l6BY*H>Ca=~?qF@|kB8yI+txM&nL!@u6+4V}q4g|**M^GO6&5?@b>jk&Nb2HyHlCLoK)iWuOFPf8r=U~cZQkI2m!}TE$camPw zYRmV#oey-B?R^_JfmwFItoCw^Fv(Z_`?hr=2dOMEr9e+9kLXNjuoqr7B%c8x%;dt= zQV6iRl`R*To9=9h%WrvkNcp|p;5p)#0iL5T>d=qRmLigX+sm!e@tgjqs4bR#?L=UR zZ+T8x4}R$7#)_eDl``@bp!2-7FrmuS+Wsc?jaTS5P7G!8XGXlhp&NqVDz3_^-XF@o zSPPbmmD%Y5^b+Ti8~DK1u|5UPQYNK_E(qm3Rrk)${}#CcvhW>@h5)A_*~Xz)3|4f! 
z)oWdmUE$ih!_nj->dE^DK4oq8aKr<5g5_dD>DO)&=j3sBj_c;(7iKv;o`Yx4^56)B z@LWfEDwDkpbfH!rE$BCA=EBLE!1SzRZoR2+H}C@rCW76izF2FBg=T^m^|sem?s4c; zB`X19*@hYiSejl&w}BpKYvx$~gH@zFRmFm$LBjwF(SL<!2!oXHf0(sabC+JH z$X4?PpPF5fCOfcLdvR3U;TLb>h}{D`2UB2^ps0FEPFfpOC+M8iHR*-Tfnd5)f$qo3 zfql08sQ>1T?b1Hf1j^P&E7$Sq)>-0Gv5UzHa)x0@w5)9Lu_)6!ZlB=9sst3RA&TTl=>B z6q%FlPf}G9;u7jn3hpupVaT(@Bb-D%?L&csShb%1!#VpUBHa^H^^{XK{wsxgw%&Bn zh?=8aU+v0-cLUai3fA46{ODtaJ8?4CD!2U7UIqWaZbq(tp$vGm} zWG(9TNPB-K2#1noP5&nTCIC&mQ}CUVRM{t4#kz@Gwk?O>kSu_UH2TMHddsaUyj}f% z+w6 zbC=3k!90!wNl;?Ph?+hn)?||7HT2Y%Fa-|Rp)KD(kSTa5&aU^gM=H3Ap`#O;_XOj+ zpJ0bhc6Myq&N*N%wz2zFg5%7gr0U3{Vl%HSouk0f*oLheXtL#^KMhOM@!0Rx*y5Ax z)W`9<{cIOyxc-O!nOq!paP5eSiRgt>Mg+){uXAja0xK@JgiK0s-p(seegD3my~|tX z@e6dJ$63@QKmp$0utt!~SgFzhmv#h$!#)s!j`*Ito`2k8JG~aQKB2HK^iKsyDRmx!nbyYucx^yV`F_S{B^dLtS-1=1pMC}cpUwGfpz;~l$k$y z*J!u3e9L#?!1bc+Z>i`*|0;g0!zP2ioQZz2oBv{KT|xECmV`&jyQxzPiONEYd0Nt( zi{^dPWLw;=882}U8=UcL>QPQxwS|wR1OrAqV#}RgUfV#BN^o`VY-nWH} zCl;mXyzd=*#nR*|>Un!6P3wm~tH^#o)ONDYkpUpT%|7^PdN5ksy|?X)_{zq|ie^;P zDc~;CM9o1S`$MtJzflHf^nTFaGMp(%YeN^@z1nbzlSgJpFPJ0I+QHZZtK((|XJRF4o;^BCa3x;(wThsZg>* zIVv^7)dzW)fAK>Qc_bexT_JU5K7!T?RvxM(zoukfzMIdx(i6F0DI9z_i_x8Jvm$=X zJ4m7SCc2Q&!-TDrBAo}7tL`!EPmmxu=O3vg@h++Ovc_^Y zpw7Qf(rh3vIJC!6-&B(;82jo%;*k%!sUN@TGGJZZ5-mcbS(OP$R8>?pD&{GG$0ULq z*!LSruiW*24DS5^hGsBafU4{nIep5*8~B)l1lBjl?s{?e`i42Lx_s&1<;yv02boDV zsj`m`Z!G_=IAn@1soVBrPX@&sZx2gMb!x45i%y`t8b9E8X?>P7Pf)w~No|k#`UEfs z0ElpTBse!EZ7}dRnFmOboqz)77?3aFjN$(d?2#BQ+S{5OPYspx;CO!T+xc4-1TELU zKeHtD{bex~2%U9AN6w30IPo^Yea_y>7ol4rb*s%(#Oo5)-u`YmQo+Ft{tEcPWd$)a zCp#Y`kmSykC_~!tZ7r6}n$+6gXr-PP=Eq3$oXiF!0FS2l)zE{aQz9kaKlIsMNU17W zTs9Lqi}l1cNKdqy1(n|wsr1`pK& z+juU!uH5Oou?h;9tHLy6klML;r@0iatFII|i?*6JqqLnbhFw7Ib1mgmslW5f26g1k ze)$=V1R;ITF6vtp#(wY=Jy*ADxSH%Uz+*2PLd*fY;JETNjJ`Ctda8{EJOcO{#~1hp zAYue1)K3s$7VAt}4rcK!OyhtVfHj$htj&&i5YXgoFW97GUzRYz8KTd^v?!3STu+FY z`m?OeE#xrvE#wN$Rf+j>OKDhZ2 zFYQlR%N=-5nH1am>C=_IOv{Y``&oG7VzY?_dP*7~G+I+plTey+CQkvoYjK2Na6rv` z8z4lTu+BKbycUw&Rb#%fSrBel}M5=@Zu<( z%xp~Q`&UU_)3zS_foJ7$w(N1%Rr82 z5s!mo8zGWA2Y8q@m?HouwQ`urTXW0RmXFI@9!Z}$ZYk=aSO?Xr;PRM%I)DWakljHU z@=cgH`1?7UoDTddeefOSiK1N8eMj>1Ush6KeP7fIQS(~W)EjUwULv)?3kY|(<0PCn{faobLG6x-^h*t2#SMryfeJw}9I^Aa zRb7e@$=LTm=jH}AU=zb9R0BwC8`%jsVK#8W3wvR%w#5?!DAJ)uw8{m539%?8=vMuT z|6B8AvXp^_la4^dUEoXAC{@0(v1tBwWxI9$?*U8-&mIt}27_eM&i~&m0FSh)qo{}L zLX*yvbb0N=2DrNUpW&YJi16Tpc-E1#VEv0kpnY{sWHVr@$~`%cx|+$6jQATu1Z@%9 z(VY#EJ3W?6`JkMo@Y7g98rU%P!1y?auhF6r0LFNI=og+x zIzJLR=QmiVT>BLLX0jWc#0#T%q^Wf%-=wPxOC|TTKdChqE(k$@e*FF`H+y+r`E`?W zP2sx|JrAz%z4&&;pPtbDm*`JXw08koDwNew?sE=Wq=9pOTT0}DE=Gj@W~hJ0b*<|c3j`h*8-ErTj&Wv@Guv(F22Cj_x*73- z+#efw`rV20p7_!zm(xI=Uw5(e(|NWX`WCuJSr0DUt%kY0wIOzAq&rY7zfE+^sPgR| z9PaZaI|5Guoa{#)43q{c^(B8{syY9dX@_V+9v2^Hs9j~=;=5?x!?MGo2e*`%O~@I% zsiF;=I`u{OplQaf`cxxUq$^dQ}bWfcZETDldOH zTJzM61q)X;PaJb*71~EK8+)}~rK2Tbsu#(AGTKm5>*l8UC}Wd3NPSrqclv{+bij?j zmT^~?oy=cbF$rXR)68&87B``Os+g(%;HIIoV zM5y-ky126z~J&y>*>W~`k6YU@9o5Pjq*CSRODnS$b7nF_r1(VtcL&< zrTTW5q)@_p?k#BiKI;BR=7;>~0ASdjyj+NkGO06{j|3Ye+Py_@l#|}bexYwmk=rQM za}I~L>)d1nYQo%Br#HDfws0~NKw5#90-;o}6p;fU|MC&l`p+bGt)ra3KxbIK2viF> zLz6=EkXLrS5&a@hoG-QUIO><)`(duxg~15S3397Ho7_L1zIL}Yb0ndidAU9}cJbH< zO6L?noji)3k%co9d4+@>qwSYh>;*0+9Pb6qc5jUbtN57St`N#)x61s{4c7124P_4? 
zo;Fa=kVA2|TK_8l(K0u==mrmy!FKif(}yD;>EJ(x^Db!L4&~}Nl)z;ock?FkE)))@ zXy!oiI=jU)=aUIeqc4oYH!(EHPG^m(%nwBT0sQ?v@cEgGP)ikXN%9y49^;lAu*5=hAjb_J z9=i4dIm~OrsbI)Q6zAhRLfY0s-GPCAfkLj?BImiUxHAtXZ#q>W^^$h3pZ<=sD+zBF zN;Tfy8C;D*Pxl5|!^fOgqL>}!r=xALUy%FzVq+6`4&|==>;Bv`KNZLCX6AkUCSM40 z>rXRVjnUPAcUs;)Muh&IarjuHfEq}VKM^kPwidtkiPY*(*qm=SG>P@kTjx-!O-Ycs zF}UPE>SnXBDQ!!31O3Zxd@CC%=gTW^oH>rvoku=OzcWX?nBY#a&nY?`u04GY5+z8U zmD5H==rwJcE|70TtXS-!frKngBG>Z5IFkGnoNoj>;PJ9gY?f2V$1n#9rj9q5)F97N5K9L`u341W3#1y*k_WVlW5G1{MBop7 zu3P|pU42(JNZinRS2kensSvBLikhSdt7m_epcO%K=+n9Ir_E2aFrYnSv%!M568-f0 z3m9}U_Ub`xl>_aMgGAFc0Eu`xLCrYAbMs1)jH`QM&IUu>5-3@ z_c)V>0Ky1tyH4nCp+C#iK2rXN6XH1IN5zdgyRJC$HFg<+Mv@j#Z9w)`U;*WpqY?{T z3Xq6UzF@wOBn9mK$#pQUgi*9dfzp?4ewfSwE~N>QO*hfPjvx;zs&fk!rIy_vFd*%m z6s;BEy3mrQYclj8luJ-%>86x9^hFc+5rrh!qx@= zhhmEWtXvI(5-Eb!0FPZ-KYe?RKQ{M06O9>Xe2SCBpfL44jT1+h!5*NeDf;$m3s?C` zBPyW@AZqqw;gyG(4S)8|c;_JQL`oa;cxCG8_d~^zP4E4HQCFou0UynDAOa4AJ&tA`0la~Ys zdkd{+G64yfpDF@vaj%EKy9)Mb?rG$I4af_Eo2d@MLHE#-H#;aI3muGmw{qcrk5e5v zmnxLntB5tA8lI0MivpC)4V4O@KzY6|3UIUkEQ?{?hSW6Z_~rb7W1Z2RgLB%WE`@3X z75-{HTSp#BXWs|_G(}ah-r}EBZjUqwe*VG=Y|S)i-hiEF9T+UZ3i=PAw+(6*$WBl44&*`{Enp}zA%rjoxIJ*+!L=!JJ|q5_ zZ2uddsy23ZTOgNnL|6h_MP-L>e8>W*cdNlRT<<}%dhc{88_C7=!Ow=o}REcU| z|ExJe0+eq!?s{I?1Ij`WqV|b_UV&a|8YUqtuUXg8f?ys5zGy-nYh-M4U+FClNYA2F zyVk4CBQ5O#WjOZTO%Q;auOBiO%uqc(NR|Q%Zg4JZgNkX>)T6;KuGHdN;WQneQ(PF+ zX_L(SMWLEGz`^WDvEZ`4$5BrscQ0wrMD2P7aly2omJDpxxw?EmRo{WW;%M5R7sPgG z)Q|K@n)@Q!KPwXIVm@%Zr9Gty9ciyIy~V)yL`71{1fXc;hu9Qs&5m5s4PEPsLOAfL zCsZ}FOz;2GGrn)U5M(lQ;b8x?kEXZ7S5cN=2sdp9bW|F`^_P8dx6Jp#yzmxYs@7=; zDkzGT20J1N*NJ!k%ZlJcGi?YppT|rdJc@58aru26eojC1pT6fwQHrWp8#xFj;2`Cr z0JuM}_pC%%qf_*xo`WkSUBuu2el9oR(qOP!Z39};TTZ#hC^>unDd4o09*t;8RT;z- zbJTO1?>aojSmde~3v|jQD53>mL&vj?lhrNK_U>%;mjw2db2Jw$1L_!>dnBR@Z=IEx zxG-SfE(RE*@keX-+$koZYOt;eURyP(7uF7A(OmIeJVX8Y&zP_M&eQL*9^hW!Rxr2o z5EMy(=1y84ALSi0K*;LFyV?>stjO{+;I`7pbLBt~)CJL{DGuknU{-u=y(2Q7>k|^{ zbfk>nuC5;^j7+H9$A|a*kqF2fhx?^Z-}RkITLo8`&5`>Qq7Fg*`o7-~+wjj+YI@+Y z;Kp>tH8mB`DC6Bji8GfCX`m8q7|_@bw~+RB%w4j;b6fS{`zJPQhEPr1O(>m1jxT|f zWT<*>dy0a%_~*_7iE04DLkh!`WY1?6`I~O@oRWb9&Xg%ItB7?SeW;`Te(Wzx9V@kB z4(g7@^PY+1Zx{?zhn@ftlnIU~_TYHu!~nO|XMU3?ym$o5hIFk=?HP{UNf`aa*FYX! 
zb{V*^R(an?A6}pu{7B9C89zMtp|0z9@B4}@wMEIMK!q*F{k5{)E82&Rs z#3iW9vGDqc`yW+tc)_{9b}@O-2Hduoo`S~pba+X zNvwFqZ**PBlBKF^>drs;xAI+xZorRASEKef(Xeks?rpSdSPswd7Gvq0x|{RdhQXu0X0q>m`#JAu*9)D09+!b?(4b`1 z$+-Em)sc@XAv<|W64qu|3kusH!dD( zb$tI+=veDB!*A~Log2pQ8)4c|ifR7P|Ct4-)UuDw;$_yVqW;&!sHZR?EAKpweX+qh zdFYXUaLj)Z;73=nR=4PU0;&xF+;G+V%r^$n;Za3!eyO}>{)u>x9p$&H01>CJs-azg+#aQCCNlGi% zhHTKGNt3`US(eS`9&2~`Qb>N4`H_@@>2zf%CGRJ+G2;0j0+mENC=D2DLRmZmHlkIm z|DYKY0~-gaTkdY-l~yr|WO(A_J8_jNAu&{#%xqW2ge}d(E4WvId0{%`K;BD{jY<&p z)iR)^gojX@9oA!OR{R*iHf9!n{YK_)X5km2FJOB18LZ@NC>5Pr{MCqWC2^LOA>%t5 z`CU=lpjH0pH<#DY;%&HI;!Lc)I z0g2l6V0e$RCUPFaEDicS1$B$JPzwtv+yC{-CXd&sb2tL>-*+DGM!eEuoZGirh&HB) zlhY!CJNs+Zpa$%TaF_!msKRO1|4TZ*Bn^y4uRV+m-nmX*0i7?g&i06I>kz$VH*ilW zEN)&F!wd!HjX*U*C)g67{iW695l(0ol+ zq8DHpFhkJO8N4h#<1SM+9J_48Y`3mfcLG!sh!HTvtf+ZYr?U%v;dguJF!WZ2S`=R| zUCpAqjgloeGzm3_j1wY)-F)rnEK!MW(i01k7QK8MCF{B=Z!XF!f|iB5HDC^xywWBs z=7h=rK%@Ycb#?U621pQP70be3kI)5ACbn^bnG}`!W*1|sUA^}#TndQK)-x;ICM%I1 zD9}xrk}w_5)cle~{sN9al6Rv5G_y_lh=V|ESrZ6?G-?6*%BAk*7@7`*+7VuS4Y>2S zJ2u6NU-|l|s(uNp0}9pEMpd`?pVVHWJJ1~Hv4e0^ZLb&S5(Q{nnM3$L2rwtbHjf86 z?+uB?clv@@-sa&T&@?|1gLDDCvL^{-22}nHLg8jI0KD4Dx(|n&>Kkjv!HBfM8_Rl%yqwd?2`@fENFlQAPtvlM*rAfnPnXCAqAM2t(g!^5CNF7 z+M(3IRmU|KFm$5QuAQFv2+ZLB&p$nR@817udq?swTkRuI;gkpm)u9V7SAa6$cu)lh zM-2H0pl^dSmpvSS-{o<%0EPs=+bDM@1)94$xQQ&$>}sWW)(*XN|7xY3o_ZIK0`=x) z_T^@9COt@?kv~j^cus!tU#4p|wpW1XDR_v2-P#aABLr(-ujM6;!*t&E))W>3{TaA& zk6`C_@*Uvt(uiK5G=v7&qWH&Myu#`*$4YpTd>X0s5dqiiTF;K>*5R-7>wqFgQS)|a z!HFK%L4i4kqnuu6`dmDU!6Yu@nffvY*nFL@325g{e+S{eW9<*W`%?TKV8}r+d(;uo z!6jfs8=206>6A~QbUBU6e0f4Ukli4q$=e7_N*iCW^#>NOkvXB>GsO8aF|N|^>i}cx z@99`}JzMM9^x6yrI;^p2#xz%EdJTp_U;^`ODwUsgJp7^FJrbx=nG~;n(JYrs&a^)b zSI^44yJhcoKydCAPm#2>=0nF3wR#P9*gL3952N2H@?JZ#3>yK>Si!#VLMGw6>1V?jp_ zT3EyDQik_*pH{~+sg+Q-#GQTHpvZdqHH6)s3dHY1<^SXtm6piriXPY`7%rn;-O!7x zIL-@#`-y!=-kOE$>E<}DTyv3%%h)UPCL#>KANr|N8K&b;IBUa?i_b&qW+>oEt999E z3HaIo9DA^MhbgLloNF5@d-D;}pgTLipZps{M|swXaNI3x&>yxjz|%}mF zAVj{s*%3wt0~VcuOXv)?`NjWT%+I$&?X;)|)~a5+uB&fMcYg%v$MJ$0LJX#LGV_U$?d>v_Sb{O=mFzE zy*fC-G!8n^_05*hs1X}hHHdLP*6F(5SV<-)e6MSLmBt0S_hadxq3PRvfy@G=3Csx* zPG2zO;Rw1VSt-jUX%)v4Mm{+ncgy7hjjUE7)5Uy^dW~P5KBl3w%H&7j1(yjRPbgWT zq5uqm+QFCd8mgmsIncN0p?vX*H?&XSi2zp+E@QiBnp-W-V6