#
# Attempt to find the python installation that matches `EXECUTABLE` and check
# that its version is one of the `SUPPORTED_VERSIONS`.
#
macro (find_python_from_executable EXECUTABLE SUPPORTED_VERSIONS)
  file(REAL_PATH ${EXECUTABLE} EXECUTABLE)
  set(Python_EXECUTABLE ${EXECUTABLE})
  find_package(Python COMPONENTS Interpreter Development.Module Development.SABIModule)
  if (NOT Python_FOUND)
    message(FATAL_ERROR "Unable to find python matching: ${EXECUTABLE}.")
  endif()

  set(_VER "${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}")
  set(_SUPPORTED_VERSIONS_LIST ${SUPPORTED_VERSIONS} ${ARGN})
  if (NOT _VER IN_LIST _SUPPORTED_VERSIONS_LIST)
    message(FATAL_ERROR
      "Python version (${_VER}) is not one of the supported versions: "
      "${_SUPPORTED_VERSIONS_LIST}.")
  endif()

  message(STATUS "Found python matching: ${EXECUTABLE}.")
endmacro()
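
# Illustrative usage sketch (not from the original file; the interpreter path
# and version list are assumed for the example):
#
#   find_python_from_executable("/usr/bin/python3" "3.9;3.10;3.11")
#   # On success, `Python_EXECUTABLE`, `Python_VERSION_MAJOR`, etc. are set by
#   # find_package(Python) for the matched interpreter.
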

#
# Run `EXPR` in python. The standard output is stored in `OUT` with trailing
# whitespace stripped. If python exits with a non-zero status, a fatal error
# prefixed with `ERR_MSG` is issued.
#
function (run_python OUT EXPR ERR_MSG)
  execute_process(
    COMMAND
    "${Python_EXECUTABLE}" "-c" "${EXPR}"
    OUTPUT_VARIABLE PYTHON_OUT
    RESULT_VARIABLE PYTHON_ERROR_CODE
    ERROR_VARIABLE PYTHON_STDERR
    OUTPUT_STRIP_TRAILING_WHITESPACE)

  if(NOT PYTHON_ERROR_CODE EQUAL 0)
    message(FATAL_ERROR "${ERR_MSG}: ${PYTHON_STDERR}")
  endif()

  set(${OUT} ${PYTHON_OUT} PARENT_SCOPE)
endfunction()
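
# Illustrative usage sketch (assumed expression, not part of the original
# file): capture the installed torch version.
#
#   run_python(TORCH_VERSION
#     "import torch; print(torch.__version__)"
#     "Failed to determine torch version")
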

#
# Run `EXPR` in python after importing `PKG` and append the result to
# `CMAKE_PREFIX_PATH`.
#
macro (append_cmake_prefix_path PKG EXPR)
  run_python(_PREFIX_PATH
    "import ${PKG}; print(${EXPR})" "Failed to locate ${PKG} path")
  list(APPEND CMAKE_PREFIX_PATH ${_PREFIX_PATH})
endmacro()
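
# Illustrative usage sketch: extend `CMAKE_PREFIX_PATH` with torch's bundled
# cmake package directory so that a later `find_package(Torch)` can succeed.
#
#   append_cmake_prefix_path("torch" "torch.utils.cmake_prefix_path")
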

#
# Add a target named `hipify${NAME}` that runs the hipify preprocessor on a
# set of CUDA source files. The names of the corresponding "hipified" sources
# are stored in `OUT_SRCS`.
#
function (hipify_sources_target OUT_SRCS NAME ORIG_SRCS)
  #
  # Split into C++ and non-C++ (i.e. CUDA) sources.
  #
  set(SRCS ${ORIG_SRCS})
  set(CXX_SRCS ${ORIG_SRCS})
  list(FILTER SRCS EXCLUDE REGEX "\\.(cc|cpp)$")
  list(FILTER CXX_SRCS INCLUDE REGEX "\\.(cc|cpp)$")

  #
  # Generate ROCm/HIP source file names from the CUDA file names. Since the
  # HIP files are generated code, they live under `CMAKE_CURRENT_BINARY_DIR`
  # rather than the original kernels directory.
  #
  set(HIP_SRCS)
  foreach (SRC ${SRCS})
    string(REGEX REPLACE "\\.cu$" ".hip" SRC ${SRC})
    string(REGEX REPLACE "cuda" "hip" SRC ${SRC})
    list(APPEND HIP_SRCS "${CMAKE_CURRENT_BINARY_DIR}/${SRC}")
  endforeach()

  set(CSRC_BUILD_DIR ${CMAKE_CURRENT_BINARY_DIR}/kernels)
  add_custom_target(
    hipify${NAME}
    COMMAND ${CMAKE_SOURCE_DIR}/cmake/hipify.py -p ${CMAKE_SOURCE_DIR}/kernels -o ${CSRC_BUILD_DIR} ${SRCS}
    DEPENDS ${CMAKE_SOURCE_DIR}/cmake/hipify.py ${SRCS}
    BYPRODUCTS ${HIP_SRCS}
    COMMENT "Running hipify on ${NAME} extension source files.")

  # Swap the original CUDA sources for the hipified ones and pass the C++
  # sources through unchanged.
  list(APPEND HIP_SRCS ${CXX_SRCS})
  set(${OUT_SRCS} ${HIP_SRCS} PARENT_SCOPE)
endfunction()
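
# Illustrative usage sketch (the source list and target name are assumed):
#
#   set(EXT_SRC "kernels/cuda_utils.cu" "kernels/ops.cpp")
#   hipify_sources_target(EXT_HIP_SRC _my_ext "${EXT_SRC}")
#   # EXT_HIP_SRC now lists the generated .hip files under the build tree plus
#   # the untouched .cc/.cpp sources.
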

#
# Get additional GPU compiler flags from torch for the given `GPU_LANG`
# ("CUDA" or "HIP") and store them in `OUT_GPU_FLAGS`.
#
function (get_torch_gpu_compiler_flags OUT_GPU_FLAGS GPU_LANG)
  if (${GPU_LANG} STREQUAL "CUDA")
    #
    # Get common NVCC flags from torch.
    #
    run_python(GPU_FLAGS
      "from torch.utils.cpp_extension import COMMON_NVCC_FLAGS; print(';'.join(COMMON_NVCC_FLAGS))"
      "Failed to determine torch nvcc compiler flags")

    if (CUDA_VERSION VERSION_GREATER_EQUAL 11.8)
      list(APPEND GPU_FLAGS "-DENABLE_FP8")
    endif()
    if (CUDA_VERSION VERSION_GREATER_EQUAL 12.0)
      list(REMOVE_ITEM GPU_FLAGS
        "-D__CUDA_NO_HALF_OPERATORS__"
        "-D__CUDA_NO_HALF_CONVERSIONS__"
        "-D__CUDA_NO_BFLOAT16_CONVERSIONS__"
        "-D__CUDA_NO_HALF2_OPERATORS__")
    endif()

  elseif(${GPU_LANG} STREQUAL "HIP")
    #
    # Get common HIP/HIPCC flags from torch.
    #
    run_python(GPU_FLAGS
      "import torch.utils.cpp_extension as t; print(';'.join(t.COMMON_HIP_FLAGS + t.COMMON_HIPCC_FLAGS))"
      "Failed to determine torch hipcc compiler flags")

    list(APPEND GPU_FLAGS
      "-DUSE_ROCM"
      "-DENABLE_FP8"
      "-U__HIP_NO_HALF_CONVERSIONS__"
      "-U__HIP_NO_HALF_OPERATORS__"
      "-fno-gpu-rdc")
  endif()

  set(${OUT_GPU_FLAGS} ${GPU_FLAGS} PARENT_SCOPE)
endfunction()
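
# Illustrative usage sketch (the output variable name is assumed):
#
#   get_torch_gpu_compiler_flags(EXT_GPU_FLAGS "CUDA")
#   # EXT_GPU_FLAGS can then be passed as COMPILE_FLAGS to
#   # define_gpu_extension_target below.
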

#
# Convert a two-digit architecture string into a version string,
# e.g. "90" -> "9.0" (a trailing suffix such as the "a" in "90a" is preserved).
#
macro(string_to_ver OUT_VER IN_STR)
  string(REGEX REPLACE "\([0-9]+\)\([0-9]\)" "\\1.\\2" ${OUT_VER} ${IN_STR})
endmacro()
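
# Illustrative usage sketch:
#
#   string_to_ver(ARCH_VER "80")   # ARCH_VER == "8.0"
#   string_to_ver(ARCH_VER "90a")  # ARCH_VER == "9.0a"
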

#
# Override the GPU architectures detected by cmake/torch and filter them by
# `GPU_SUPPORTED_ARCHES`. The final set of architectures is stored in
# `GPU_ARCHES`.
#
macro(override_gpu_arches GPU_ARCHES GPU_LANG GPU_SUPPORTED_ARCHES)
  set(_GPU_SUPPORTED_ARCHES_LIST ${GPU_SUPPORTED_ARCHES} ${ARGN})
  message(STATUS "${GPU_LANG} supported arches: ${_GPU_SUPPORTED_ARCHES_LIST}")

  if (${GPU_LANG} STREQUAL "HIP")
    #
    # `GPU_ARCHES` controls the `--offload-arch` flags.
    #
    # The architecture list is taken from the `PYTORCH_ROCM_ARCH` environment
    # variable if it is set; otherwise `CMAKE_HIP_ARCHITECTURES` (populated by
    # the torch cmake setup) is used.
    #
    if(DEFINED ENV{PYTORCH_ROCM_ARCH})
      set(HIP_ARCHITECTURES $ENV{PYTORCH_ROCM_ARCH})
    else()
      set(HIP_ARCHITECTURES ${CMAKE_HIP_ARCHITECTURES})
    endif()

    #
    # Filter the detected architectures down to the supported set.
    #
    set(${GPU_ARCHES})
    foreach (_ARCH ${HIP_ARCHITECTURES})
      if (_ARCH IN_LIST _GPU_SUPPORTED_ARCHES_LIST)
        list(APPEND ${GPU_ARCHES} ${_ARCH})
      endif()
    endforeach()

    if(NOT ${GPU_ARCHES})
      message(FATAL_ERROR
        "None of the detected ROCm architectures (${HIP_ARCHITECTURES}) is"
        " supported. Supported ROCm architectures are: "
        "${_GPU_SUPPORTED_ARCHES_LIST}.")
    endif()
  elseif(${GPU_LANG} STREQUAL "CUDA")
    #
    # Setup/process CUDA arch flags.
    #
    # The torch cmake setup hardcodes the detected architecture flags into
    # `CMAKE_CUDA_FLAGS` as `-gencode` options. Since `CMAKE_CUDA_FLAGS` is a
    # "global" variable, it cannot be adjusted on a per-target basis. So all
    # `-gencode` flags are extracted from `CMAKE_CUDA_FLAGS`, removed, and
    # repackaged into the format cmake expects for the `CUDA_ARCHITECTURES`
    # target property (e.g. `75-real`, `90-virtual`), which can be set per
    # target.
    #
    message(DEBUG "initial CMAKE_CUDA_FLAGS: ${CMAKE_CUDA_FLAGS}")

    # Extract all `-gencode` flags from `CMAKE_CUDA_FLAGS`.
    string(REGEX MATCHALL "-gencode arch=[^ ]+" _CUDA_ARCH_FLAGS
      ${CMAKE_CUDA_FLAGS})

    # Remove the `-gencode` flags from `CMAKE_CUDA_FLAGS` since they will be
    # passed back via the `CUDA_ARCHITECTURES` property instead.
    string(REGEX REPLACE "-gencode arch=[^ ]+ *" "" CMAKE_CUDA_FLAGS
      ${CMAKE_CUDA_FLAGS})

    # Fail early if there were no architecture flags to extract.
    if (NOT _CUDA_ARCH_FLAGS)
      message(FATAL_ERROR
        "Could not find any architecture related code generation flags in "
        "CMAKE_CUDA_FLAGS. (${CMAKE_CUDA_FLAGS})")
    endif()

    message(DEBUG "final CMAKE_CUDA_FLAGS: ${CMAKE_CUDA_FLAGS}")
    message(DEBUG "arch flags: ${_CUDA_ARCH_FLAGS}")

    # Initialize the output architecture list.
    set(${GPU_ARCHES})

    # Process each `-gencode` flag.
    foreach(_ARCH ${_CUDA_ARCH_FLAGS})
      #
      # Extract the virtual architecture (`compute_XY`) and the codegen
      # architecture (`sm_XY` or `compute_XY`) from each flag, e.g.
      # `-gencode arch=compute_75,code=sm_75` -> _COMPUTE=75, _SM=75.
      #
      string(REGEX MATCH "arch=compute_\([0-9]+a?\)" _COMPUTE ${_ARCH})
      if (_COMPUTE)
        set(_COMPUTE ${CMAKE_MATCH_1})
      endif()

      string(REGEX MATCH "code=sm_\([0-9]+a?\)" _SM ${_ARCH})
      if (_SM)
        set(_SM ${CMAKE_MATCH_1})
      endif()

      string(REGEX MATCH "code=compute_\([0-9]+a?\)" _CODE ${_ARCH})
      if (_CODE)
        set(_CODE ${CMAKE_MATCH_1})
      endif()

      if (NOT _COMPUTE)
        message(FATAL_ERROR
          "Could not determine virtual architecture from: ${_ARCH}.")
      endif()

      if ((NOT _SM) AND (NOT _CODE))
        message(FATAL_ERROR
          "Could not determine a codegen architecture from: ${_ARCH}.")
      endif()

      if (_SM)
        # A `code=sm_XY` entry generates binary (real) code for that arch.
        set(_VIRT "-real")
        set(_CODE_ARCH ${_SM})
      else()
        # A `code=compute_XY` entry embeds only PTX (virtual) code.
        set(_VIRT "-virtual")
        set(_CODE_ARCH ${_CODE})
      endif()

      # Skip any codegen architecture that is not in the supported list.
      string_to_ver(_CODE_VER ${_CODE_ARCH})
      if (NOT _CODE_VER IN_LIST _GPU_SUPPORTED_ARCHES_LIST)
        message(STATUS "discarding unsupported CUDA arch ${_CODE_VER}.")
        continue()
      endif()

      # Append the architecture in `CUDA_ARCHITECTURES` format,
      # e.g. `75-real` or `90-virtual`.
      list(APPEND ${GPU_ARCHES} "${_CODE_ARCH}${_VIRT}")
    endforeach()
  endif()

  message(STATUS "${GPU_LANG} target arches: ${${GPU_ARCHES}}")
endmacro()
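
# Illustrative usage sketch (the supported-architecture list and output
# variable are assumed):
#
#   set(CUDA_SUPPORTED_ARCHS "7.0;7.5;8.0;8.6;8.9;9.0")
#   override_gpu_arches(EXT_GPU_ARCHES "CUDA" "${CUDA_SUPPORTED_ARCHS}")
#   # EXT_GPU_ARCHES now holds entries such as `80-real` or `90-virtual`,
#   # ready for the CUDA_ARCHITECTURES target property.
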

#
# Define a python extension module target named `GPU_MOD_NAME`.
#
# Keyword arguments:
#   DESTINATION <dir>          - install destination for the module library.
#   LANGUAGE <lang>            - GPU language for the module ("CUDA" or "HIP").
#   SOURCES <srcs>             - list of source files.
#   ARCHITECTURES <arches>     - target GPU architectures in cmake format
#                                (optional; cmake defaults are used if omitted).
#   COMPILE_FLAGS <flags>      - extra GPU compiler flags (optional).
#   INCLUDE_DIRECTORIES <dirs> - extra include directories (optional).
#   LIBRARIES <libs>           - extra link libraries (optional).
#   USE_SABI <version>         - build against the python stable ABI of the
#                                given version (optional).
#   WITH_SOABI                 - name the library with the python SOABI suffix
#                                (optional).
#
function (define_gpu_extension_target GPU_MOD_NAME)
  cmake_parse_arguments(PARSE_ARGV 1
    GPU
    "WITH_SOABI"
    "DESTINATION;LANGUAGE;USE_SABI"
    "SOURCES;ARCHITECTURES;COMPILE_FLAGS;INCLUDE_DIRECTORIES;LIBRARIES")

  # For HIP, convert the CUDA sources to hipified sources first.
  if (GPU_LANGUAGE STREQUAL "HIP")
    hipify_sources_target(GPU_SOURCES ${GPU_MOD_NAME} "${GPU_SOURCES}")
  endif()

  if (GPU_WITH_SOABI)
    set(GPU_WITH_SOABI WITH_SOABI)
  else()
    set(GPU_WITH_SOABI)
  endif()

  if (GPU_USE_SABI)
    Python_add_library(${GPU_MOD_NAME} MODULE USE_SABI ${GPU_USE_SABI} ${GPU_WITH_SOABI} "${GPU_SOURCES}")
  else()
    Python_add_library(${GPU_MOD_NAME} MODULE ${GPU_WITH_SOABI} "${GPU_SOURCES}")
  endif()

  if (GPU_LANGUAGE STREQUAL "HIP")
    # Make this target dependent on the hipify preprocessor step.
    add_dependencies(${GPU_MOD_NAME} hipify${GPU_MOD_NAME})
  endif()

  if (GPU_ARCHITECTURES)
    set_target_properties(${GPU_MOD_NAME} PROPERTIES
      ${GPU_LANGUAGE}_ARCHITECTURES "${GPU_ARCHITECTURES}")
  endif()

  set_property(TARGET ${GPU_MOD_NAME} PROPERTY CXX_STANDARD 20)

  target_compile_options(${GPU_MOD_NAME} PRIVATE
    $<$<COMPILE_LANGUAGE:${GPU_LANGUAGE}>:${GPU_COMPILE_FLAGS}>)

  target_compile_definitions(${GPU_MOD_NAME} PRIVATE
    "-DTORCH_EXTENSION_NAME=${GPU_MOD_NAME}")

  target_include_directories(${GPU_MOD_NAME} PRIVATE kernels
    ${GPU_INCLUDE_DIRECTORIES})

  target_link_libraries(${GPU_MOD_NAME} PRIVATE torch ${torch_python_LIBRARY}
    ${GPU_LIBRARIES})

  # For CUDA, link against the CUDA libraries directly; for HIP, fall back to
  # the full `TORCH_LIBRARIES` list.
  if (GPU_LANGUAGE STREQUAL "CUDA")
    target_link_libraries(${GPU_MOD_NAME} PRIVATE ${CUDA_CUDA_LIB}
      ${CUDA_LIBRARIES})
  else()
    target_link_libraries(${GPU_MOD_NAME} PRIVATE ${TORCH_LIBRARIES})
  endif()

  install(TARGETS ${GPU_MOD_NAME} LIBRARY DESTINATION ${GPU_DESTINATION})
endfunction()
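
# Illustrative usage sketch (target name, sources, flags, and destination are
# assumed; it ties together the helpers defined above):
#
#   define_gpu_extension_target(
#     _custom_ops
#     DESTINATION mypkg
#     LANGUAGE "CUDA"
#     SOURCES ${EXT_SRC}
#     COMPILE_FLAGS ${EXT_GPU_FLAGS}
#     ARCHITECTURES ${EXT_GPU_ARCHES}
#     USE_SABI 3
#     WITH_SOABI)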
|