Commit
houdini stock nodes: use opencl,karma device tags
pedohorse committed Jul 30, 2024
1 parent e9925d7 commit 4c08ea3
Showing 6 changed files with 116 additions and 26 deletions.
@@ -4,6 +4,7 @@
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.text import filter_by_pattern
from lifeblood_stock_houdini_helpers.common import gpu_device_env_common_code

from typing import Iterable

@@ -237,8 +238,14 @@ def process_task(self, context) -> ProcessingResult:

script += f'print("all done!")\n'

inv = InvocationJob(['hython', ':/work_to_do.py'], env=env)
launch_wrapper_code = (
gpu_device_env_common_code() +
'import sys, subprocess\n'
'sys.exit(subprocess.Popen(sys.argv[1:]).wait())')

inv = InvocationJob(['python', ':/launch_wrapper.py', 'hython', ':/work_to_do.py'], env=env)
inv.set_extra_file('work_to_do.py', script)
inv.set_extra_file('launch_wrapper.py', launch_wrapper_code)
res = ProcessingResult(job=inv)
return res

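Note on the change above (explanatory, not part of the commit): the direct hython invocation is replaced by a small launch wrapper. The wrapper file is assembled by prepending the code string returned by gpu_device_env_common_code() to a two-line stub that re-executes the rest of its argument list, so the worker process first adjusts GPU-related environment variables and only then starts hython. A minimal sketch of what the assembled launch_wrapper.py boils down to follows; the actual environment-setup body comes from gpu_device_env_common_code(), and the assumption that the ':/'-prefixed names resolve to real extra-file paths before launch is mine.

    # illustrative sketch of the assembled launch_wrapper.py (see assumptions above)
    import os
    import subprocess
    import sys

    def _do_gpu_related_env_setup():
        # stand-in for the body emitted by gpu_device_env_common_code():
        # it reads the provided device tags and sets HOUDINI_OCL_* / KARMA_XPU_* variables
        os.environ.setdefault("HOUDINI_OCL_DEVICETYPE", "CPU")

    _do_gpu_related_env_setup()
    # sys.argv[1:] holds the wrapped command, e.g. ['hython', 'work_to_do.py'],
    # which inherits the environment prepared above
    sys.exit(subprocess.Popen(sys.argv[1:]).wait())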
9 changes: 8 additions & 1 deletion src/lifeblood/stock_nodes/houdini/nodes/hip_ifd_generator.py
@@ -4,6 +4,7 @@
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.text import filter_by_pattern
from lifeblood_stock_houdini_helpers.common import gpu_device_env_common_code

from typing import Iterable

@@ -134,8 +135,14 @@ def process_task(self, context) -> ProcessingResult:
script += \
f'print("all done!")\n'

inv = InvocationJob(['hython', ':/work_to_do.py'], env=env)
launch_wrapper_code = (
gpu_device_env_common_code() +
'import sys, subprocess\n'
'sys.exit(subprocess.Popen(sys.argv[1:]).wait())')

inv = InvocationJob(['python', ':/launch_wrapper.py', 'hython', ':/work_to_do.py'], env=env)
inv.set_extra_file('work_to_do.py', script)
inv.set_extra_file('launch_wrapper.py', launch_wrapper_code)
res = ProcessingResult(job=inv)
return res

9 changes: 8 additions & 1 deletion src/lifeblood/stock_nodes/houdini/nodes/hip_script.py
@@ -3,6 +3,7 @@
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.text import filter_by_pattern
from lifeblood_stock_houdini_helpers.common import gpu_device_env_common_code

from typing import Iterable

@@ -75,8 +76,14 @@ def process_task(self, context) -> ProcessingResult:
'__main_body__()\n' \
f'hou.hipFile.save({repr(dest_hip)})\n'

job = InvocationJob(['hython', ':/work_to_do.py'])
launch_wrapper_code = (
gpu_device_env_common_code() +
'import sys, subprocess\n'
'sys.exit(subprocess.Popen(sys.argv[1:]).wait())')

job = InvocationJob(['python', ':/launch_wrapper.py', 'hython', ':/work_to_do.py'])
job.set_extra_file('work_to_do.py', script)
job.set_extra_file('launch_wrapper.py', launch_wrapper_code)
return ProcessingResult(job=job)

def postprocess_task(self, context) -> ProcessingResult:
54 changes: 32 additions & 22 deletions src/lifeblood/stock_nodes/houdini/nodes/karma.py
@@ -2,6 +2,7 @@
from lifeblood.enums import NodeParameterType
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood_stock_houdini_helpers.common import gpu_device_env_common_code

from typing import Iterable

@@ -40,6 +41,7 @@ def __init__(self, name):
ui = self.get_ui()
with ui.initializing_interface_lock():
ui.color_scheme().set_main_color(0.5, 0.25, 0.125)
ui.add_parameter('renderer', 'engine', NodeParameterType.STRING, 'cpu').add_menu((('CPU', 'cpu'), ('XPU', 'xpu')))
ui.add_parameter('usd path', 'usd file path', NodeParameterType.STRING, "`task['file']`")
ui.add_parameter('image path', 'output image file path', NodeParameterType.STRING, "`task['outimage']`")
ui.add_parameter('skip if exists', 'skip if result already exists', NodeParameterType.BOOL, False)
@@ -51,28 +53,36 @@ def process_task(self, context) -> ProcessingResult:
args = context.task_attributes()

env = InvocationEnvironment()

if context.param_value('skip if exists'):
script = 'import os\n' \
'if not os.path.exists({imgpath}):\n' \
' import sys\n' \
' from subprocess import Popen\n' \
" sys.exit(Popen(['husk', '-V', '2a', '--make-output-path',{doframe} '-o', {imgpath}, {usdpath}]).wait())\n" \
"else:\n" \
" print('image file already exists, skipping work')\n" \
.format(imgpath=repr(context.param_value('image path')),
usdpath=repr(context.param_value('usd path')),
doframe=f" '-f', {repr(str(args['frames'][0]))}," if 'frames' in args else '',
)

invoc = InvocationJob(['python', ':/karmacall.py'])
invoc.set_extra_file('karmacall.py', script)
else: # TODO: -f there is testing, if succ - make a parameter out of it on the node or smth
invoc = InvocationJob(['husk', '-V', '2a',
'--make-output-path'] +
(['-f', str(args['frames'][0])] if 'frames' in args else []) +
['-o', context.param_value('image path'), context.param_value('usd path')],
env=env)
delegates_map = {
'cpu': 'BRAY_HdKarma',
'xpu': 'BRAY_HdKarmaXPU',
}
default_delegate = 'BRAY_HdKarma'

do_skip_exists = context.param_value('skip if exists')
script = gpu_device_env_common_code() + \
'import os\n' \
'if not {do_skip_exists} or not os.path.exists({imgpath}):\n' \
' import sys\n' \
' from subprocess import Popen\n' \
" sys.exit(Popen(['husk', '-R', {delegate}, '-V', '2a', '--make-output-path',{doframe} '-o', {imgpath}, {usdpath}]).wait())\n" \
"else:\n" \
" print('image file already exists, skipping work')\n" \
.format(imgpath=repr(context.param_value('image path')),
usdpath=repr(context.param_value('usd path')),
delegate=repr(delegates_map.get(context.param_value('renderer'), default_delegate)),
doframe=f" '-f', {repr(str(args['frames'][0]))}," if 'frames' in args else '',
do_skip_exists=repr(do_skip_exists),
)

invoc = InvocationJob(['python', ':/karmacall.py'])
invoc.set_extra_file('karmacall.py', script)
# else: # TODO: -f there is testing, if succ - make a parameter out of it on the node or smth
# invoc = InvocationJob(['husk', '-R', delegates_map.get(context.param_value('renderer'), default_delegate), '-V', '2a',
# '--make-output-path'] +
# (['-f', str(args['frames'][0])] if 'frames' in args else []) +
# ['-o', context.param_value('image path'), context.param_value('usd path')],
# env=env)
res = ProcessingResult(invoc)
return res

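For orientation (not part of the commit): the new 'renderer' menu selects a Hydra render delegate name, which the generated karmacall.py passes to husk via -R, falling back to BRAY_HdKarma for unknown values. Under that reading, the Popen call inside the generated script expands to roughly the command below; the frame number and file paths are made up for illustration.

    # illustrative only: argument list built by the generated karmacall.py
    # for renderer == 'xpu', a single frame 1001, and made-up paths
    delegates_map = {'cpu': 'BRAY_HdKarma', 'xpu': 'BRAY_HdKarmaXPU'}
    cmd = ['husk', '-R', delegates_map.get('xpu', 'BRAY_HdKarma'),
           '-V', '2a', '--make-output-path',
           '-f', '1001',
           '-o', '/tmp/render/beauty.1001.exr',
           '/tmp/scene.usd']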
@@ -0,0 +1,51 @@

def gpu_device_env_common_code():
return (
'import os\n'
'from lifeblood_connection import get_provided_devices\n'
'\n'
'def _do_gpu_related_env_setup():\n'
' provided_devices = get_provided_devices()\n'
' if "{gpu_dev_type}" not in provided_devices:\n'
' return\n'
' enabled_karma_devices_raw = {{dev_tags["karma_dev"] for _, dev_tags in get_provided_devices().get("{gpu_dev_type}", {{}}).items() if "karma_dev" in dev_tags}}\n'
' opencl_devices_raw = [dev_tags["houdini_ocl"] for _, dev_tags in get_provided_devices().get("{gpu_dev_type}", {{}}).items() if "houdini_ocl" in dev_tags]\n'
' enabled_karma_devices = set()\n'
' optix_device_count = 0\n'
' try:\n'
' for x in enabled_karma_devices_raw:\n'
' if "/" in x:\n'
' x, e = x.split("/")\n'
' optix_device_count = max(int(x)+1, optix_device_count, int(e))\n'
' enabled_karma_devices.add(int(x))\n'
' except ValueError:\n'
' print("karma_dev tag of gpu device must be an integer! disabling all gpu")\n'
'\n'
' if len(opencl_devices_raw) == 0 or ":" not in opencl_devices_raw[0]:\n'
' os.environ["HOUDINI_OCL_DEVICETYPE"] = "CPU"\n'
' print("no GPU opencl devices enabled")\n'
' else:\n'
' ocl_dev_type, ocl_dev_vendor = opencl_devices_raw[0].split(":", 1)\n'
' ocl_dev_num = None\n'
' if ":" in ocl_dev_vendor:\n'
' ocl_dev_vendor, ocl_dev_num = ocl_dev_vendor.rsplit(":", 1)\n'
' print(f"ocl device type \'{{ocl_dev_type}}\' num \'{{ocl_dev_num}}\' vendor \'{{ocl_dev_vendor}}\' is enabled")\n'
' os.environ["HOUDINI_OCL_DEVICETYPE"] = ocl_dev_type\n'
' if ocl_dev_vendor:\n'
' os.environ["HOUDINI_OCL_VENDOR"] = ocl_dev_vendor\n'
' if ocl_dev_num:\n'
' os.environ["HOUDINI_OCL_DEVICENUMBER"] = ocl_dev_num\n'
'\n'
' if len(enabled_karma_devices) == 0:\n'
' os.environ["KARMA_XPU_DISABLE_OPTIX_DEVICE"] = "1"\n'
' print("no karma xpu optix devices enabled")\n'
' for i in range(optix_device_count):\n'
' if i in enabled_karma_devices:\n'
' print(f"karma xpu optix device {{i}} enabled")\n'
' continue\n'
' os.environ[f"KARMA_XPU_DISABLE_DEVICE_{{i}}"] = "1"\n'
'\n'
'_do_gpu_related_env_setup()\n'
'\n').format(
gpu_dev_type="gpu",
)
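A worked example may help when reading the generated code above; the tag values are assumptions for illustration, not taken from the commit. Suppose the worker provides a single gpu device tagged karma_dev = '1/2' and houdini_ocl = 'GPU:NVIDIA:0'. Tracing _do_gpu_related_env_setup() then gives optix_device_count == 2 and enabled_karma_devices == {1}, and the process environment ends up roughly as follows.

    # expected effect of the generated setup code for the assumed tags above
    expected_env = {
        'HOUDINI_OCL_DEVICETYPE': 'GPU',    # part before the first ':' of 'GPU:NVIDIA:0'
        'HOUDINI_OCL_VENDOR': 'NVIDIA',     # middle part of the houdini_ocl tag
        'HOUDINI_OCL_DEVICENUMBER': '0',    # part after the last ':'
        'KARMA_XPU_DISABLE_DEVICE_0': '1',  # optix device 0 is not in the enabled set
        # optix device 1 stays enabled, so no KARMA_XPU_DISABLE_DEVICE_1 is set
    }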
@@ -3,6 +3,7 @@
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.text import filter_by_pattern
from .common import gpu_device_env_common_code

from typing import Iterable, Optional

@@ -126,6 +127,7 @@ def process_task(self, context) -> ProcessingResult:
spawnlines = ''

script = \
gpu_device_env_common_code() + \
f'import os\n' \
f'import hou\n' \
f'import lifeblood_connection\n'
@@ -212,8 +214,14 @@ def process_task(self, context) -> ProcessingResult:
script += \
f'print("all done!")\n'

inv = InvocationJob(['hython', ':/work_to_do.py'], env=env)
launch_wrapper_code = (
gpu_device_env_common_code() +
'import sys, subprocess\n'
'sys.exit(subprocess.Popen(sys.argv[1:]).wait())')

inv = InvocationJob(['python', ':/launch_wrapper.py', 'hython', ':/work_to_do.py'], env=env)
inv.set_extra_file('work_to_do.py', script)
inv.set_extra_file('launch_wrapper.py', launch_wrapper_code)
res = ProcessingResult(job=inv)
return res

