# SparkMacrosConfig.cmake
# A collection of macros that auto-generate build structure
# (library/executable targets and their target_link_<name> helpers).
# NOTE(review): 3.5.1 is a very old minimum; kept as-is so existing consumers
# on older toolchains keep configuring. Consider a `3.x...3.y` range once the
# supported floor is >= 3.12.
cmake_minimum_required(VERSION 3.5.1)
  2. # 定义一些 macro 用于自动生成构建结构
  3. # spark_add_library <lib_name> [files]...
  4. # 构建一个库,基于指定的源文件
  5. # 并根据库名生成 target_link_<lib_name> 函数
  6. macro(spark_add_library _lib_name)
  7. spark_debug_message("================ ${_lib_name} Library ================")
  8. add_library(${_lib_name} ${ARGN})
  9. set(SRCS ${ARGN})
  10. foreach(item IN LISTS SRCS)
  11. spark_debug_message(" -> ${item}")
  12. endforeach(item IN LISTS SRCS)
  13. function(target_link_${_lib_name} TARGET)
  14. spark_debug_message("${_lib_name}")
  15. target_link_libraries(${TARGET} ${_lib_name})
  16. endfunction(target_link_${_lib_name} TARGET)
  17. endmacro(spark_add_library _lib_name)
  18. # spark_add_library_path <lib_name> <lib_path>
  19. # 构建一个库,基于指定的路径
  20. # 并根据库名生成 target_link_<lib_name> 函数
  21. # 函数内增加以 <lib_path> 头文件搜索路径
  22. macro(spark_add_library_path _lib_name _lib_path)
  23. set(${_lib_name}_SOURCE_PATH ${_lib_path})
  24. set(${_lib_name}_TYPE)
  25. if(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
  26. set(${_lib_name}_SOURCE_PATH ${ARGV2})
  27. set(${_lib_name}_TYPE ${_lib_path})
  28. spark_debug_message("_lib_path: ${${_lib_name}_SOURCE_PATH}(${ARGV2})[${${_lib_name}_TYPE}]")
  29. if(${ARGC} LESS 3)
  30. message(FATAL_ERROR "Missing parameter, library path not specified.")
  31. endif(${ARGC} LESS 3)
  32. endif(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
  33. aux_source_directory(${${_lib_name}_SOURCE_PATH} ${_lib_name}_SOURCES)
  34. # message("================ spark_add_library_path: ${_lib_name} ================")
  35. file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.ui)
  36. add_library(${_lib_name} ${${_lib_name}_TYPE} ${${_lib_name}_SOURCES} ${UI_LIST})
  37. spark_debug_message("${_lib_name}_SOURCES: ${${_lib_name}_SOURCES}, ${${_lib_name}_SOURCE_PATH}")
  38. foreach(item IN LISTS ${_lib_name}_SOURCES)
  39. spark_debug_message(" -> ${item}")
  40. endforeach(item IN LISTS ${_lib_name}_SOURCES)
  41. function(target_link_${_lib_name} TARGET)
  42. # spark_debug_message("target_link_${_lib_name}")
  43. spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
  44. target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
  45. target_link_libraries(${TARGET} ${_lib_name})
  46. endfunction(target_link_${_lib_name} TARGET)
  47. function(target_include_${_lib_name} TARGET)
  48. # spark_debug_message("target_link_${_lib_name}")
  49. spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
  50. target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
  51. # target_link_libraries(${TARGET} ${_lib_name})
  52. endfunction(target_include_${_lib_name} TARGET)
  53. # file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.h)
  54. target_include_directories(${_lib_name} PUBLIC "${${_lib_name}_SOURCE_PATH}")
  55. # 如果想用以下操作手动实现 target_link_include_directories
  56. # 请注意对 LIST 类型使用 "" 进行包围
  57. # target_link_include_directories 的 PUBLIC 将会填充(追加)目标的 INCLUDE_DIRECTORIES 属性
  58. # target_link_include_directories 支持 cmake 生成大表达式,更容易操作,手动将无法实现此类能力
  59. # target_link_include_directories 支持相对路径和绝对路径参数
  60. # 手动操作将必须使用绝对路径,这是不好的地方
  61. # get_target_property(_lib_include_directories ${_lib_name} INCLUDE_DIRECTORIES)
  62. # list(APPEND _lib_include_directories "${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
  63. # spark_debug_message("----> ${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
  64. # spark_debug_message("----> ${_lib_include_directories}")
  65. # set_target_properties(${_lib_name} PROPERTIES
  66. # INCLUDE_DIRECTORIES "${_lib_include_directories}"
  67. # INTERFACE_INCLUDE_DIRECTORIES "${_lib_include_directories}"
  68. # )
  69. endmacro(spark_add_library_path _lib_name _lib_path)
  70. # spark_add_executable <exec_name> [files]...
  71. # 构建一个可执行文件,基于指定的源文件
  72. # Qt编译时源文件包括很多类型,需要指定 *.h/*.cpp/*.qrc/*.qm/... 等
  73. macro(spark_add_executable _exec_name)
  74. spark_debug_message("================ ${_exec_name} Executable ================")
  75. add_executable(${_exec_name} ${ARGN})
  76. endmacro(spark_add_executable _exec_name)
  77. macro(spark_add_executable_path _exec_name _exec_path)
  78. aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
  79. spark_debug_message("================ ${_exec_name} Executable ================")
  80. file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
  81. add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
  82. foreach(item IN LISTS ${_exec_name}_SOURCES)
  83. spark_debug_message(" -> ${item}")
  84. endforeach(item IN LISTS ${_exec_name}_SOURCES)
  85. # function(target_link_${_exec_name} TARGET)
  86. # spark_debug_message("target_link_${_lib_name}")
  87. spark_debug_message(" -> (include): ${_exec_path}")
  88. target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
  89. # target_link_libraries(${TARGET} ${_lib_name})
  90. # endfunction(target_link_${_exec_name} TARGET)
  91. # target_link_${_exec_name}(${_exec_name})
  92. endmacro(spark_add_executable_path _exec_name _exec_path)
  93. # spark_find_library
  94. # 搜索一个库,基于指定的库名,调用 pkg-config 搜索库
  95. # 并根据库名生成一个 target_link_<prefix> 函数
  96. macro(spark_find_library _prefix)
  97. find_package(PkgConfig REQUIRED)
  98. # libnotify
  99. pkg_check_modules(${_prefix} ${ARGN})
  100. function(target_link_${_prefix} TARGET)
  101. target_include_directories(${TARGET} PUBLIC
  102. ${${_prefix}_INCLUDE_DIRS})
  103. target_link_libraries(${TARGET}
  104. ${${_prefix}_LIBRARIES})
  105. endfunction(target_link_${_prefix} TARGET)
  106. endmacro(spark_find_library _prefix)
  107. # spark_add_executable_paths
  108. # 自定义构建宏,基于指定的前缀名称,处理后续参数为子目录
  109. # item: 为进行遍历后的单项,类似于 python3 中的 (for item in items:)
  110. # file: 为在目录中不以递归(GLOB_RECURSE)方式寻找 qrc 文件,需要将其参与编译才能被 rcc
  111. # 并根据 prefix-<item> 生成构建目标,
  112. macro(spark_add_executable_paths _prefix_path)
  113. set(PATHS ${ARGN})
  114. foreach(item IN LISTS PATHS)
  115. file(GLOB QRCS "${item}/*.qrc")
  116. spark_debug_message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
  117. spark_add_executable_path(${_prefix_path}-${item} ${item} ${QRCS})
  118. target_link_qt5(${_prefix_path}-${item})
  119. endforeach(item IN LISTS PATHS)
  120. endmacro(spark_add_executable_paths _prefix_path)
  121. # spark_add_link
  122. # 自定义宏以代替当前使用 fucntion 定义 target_link_<name> 结构
  123. # _IN_NAME: 此宏生成 target_link_<name> 的要求参数
  124. # ARGN: 此宏剩余的参数列表
  125. # 在使用 target_link_<name> 时
  126. # _NAME: 用于此 fucntion 中的要求参数: <_NAME>目标将要连接此库
  127. macro(spark_add_link _IN_NAME)
  128. function(target_link_${_IN_NAME} _NAME)
  129. spark_debug_message("LINK ${_NAME} ${ARGN}")
  130. target_link_libraries(${_NAME}
  131. ${ARGN})
  132. endfunction(target_link_${_IN_NAME} _NAME)
  133. endmacro(spark_add_link _IN_NAME)