2023-11-25 14:38:38 -07:00
|
|
|
from setuptools import setup
|
|
|
|
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
|
2024-01-26 08:27:44 -07:00
|
|
|
import torch
|
|
|
|
|
|
|
|
extra_cuda_cflags = ["-lineinfo", "-O3"]
|
2024-09-30 02:54:32 -06:00
|
|
|
extra_cflags = []
|
2024-01-26 08:27:44 -07:00
|
|
|
if torch.version.hip:
|
2024-09-30 02:54:32 -06:00
|
|
|
extra_cflags = ["-DLEGACY_HIPBLAS_DIRECT=ON"]
|
|
|
|
extra_cuda_cflags += ["-DHIPBLAS_USE_HIP_HALF", "-DLEGACY_HIPBLAS_DIRECT=ON"]
|
2024-01-26 08:27:44 -07:00
|
|
|
|
|
|
|
extra_compile_args = {
|
2024-09-30 02:54:32 -06:00
|
|
|
"cxx": extra_cflags,
|
2024-01-26 08:27:44 -07:00
|
|
|
"nvcc": extra_cuda_cflags,
|
|
|
|
}
|
# Register the extension with setuptools. BuildExtension supplies the
# mixed host/device compilation logic (it understands the {"cxx": ...,
# "nvcc": ...} flag dict built above).
setup(
    name="exllamav2_kernels",
    ext_modules=[
        CUDAExtension(
            name="exllamav2_kernels",
            sources=[
                "exllamav2_kernels/ext.cpp",
                "exllamav2_kernels/cuda/q_matrix.cu",
                "exllamav2_kernels/cuda/q_gemm.cu",
            ],
            extra_compile_args=extra_compile_args,
        )
    ],
    cmdclass={"build_ext": BuildExtension},
)
|