ZLUDA/Cargo.toml
Andrzej Janik cd7e2f8e36 Force loading ZLUDA through LD_PRELOAD (#447)
Certain applications (pytorch) decide that it's a great idea to distribute the whole CUDA driver and link to it with DT_RPATH. This ignores LD_LIBRARY_PATH.
This code defeats that evil mechanism through any means necessary.
2025-07-31 18:00:13 -07:00


[workspace]
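# Opt into Cargo's version 2 feature resolver (no feature unification across
# build-dependencies and target-specific dependencies).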
resolver = "2"
members = [
"comgr",
"cuda_macros",
"cuda_types",
"dark_api",
"detours-sys",
"ext/amd_comgr-sys",
"ext/hip_runtime-sys",
"ext/rocblas-sys",
"format",
"ptx",
"ptx_parser",
"ptx_parser_macros",
"ptx_parser_macros_impl",
"xtask",
"zluda",
"zluda_bindgen",
"zluda_blas",
"zluda_blaslt",
"zluda_common",
"zluda_dnn",
"zluda_trace",
"zluda_trace_blas",
"zluda_trace_blaslt",
"zluda_trace_common",
"zluda_trace_dnn",
"zluda_trace_fft",
"zluda_trace_sparse",
"zluda_fft",
"zluda_inject",
"zluda_ml",
"zluda_preload",
"zluda_redirect",
"zluda_sparse",
]
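# Crates that a plain `cargo build` / `cargo test` at the workspace root
# operates on unless -p or --workspace is passed.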
default-members = ["zluda", "zluda_ml", "zluda_inject", "zluda_redirect"]
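
# Custom optimized profile: inherits "release", then builds each crate as a
# single codegen unit with fat LTO for maximum cross-crate optimization.
# Select it with `cargo build --profile release-lto`.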
[profile.release-lto]
inherits = "release"
codegen-units = 1
lto = true
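
# Profile override: compile the xtask helper crate with optimizations even in
# dev (debug) builds, presumably so the build tooling runs at a usable speed.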
[profile.dev.package.xtask]
opt-level = 2