
spark: full-scale build template update

zinface, 1 year ago
Parent
Current commit: 1da49440b6

Binary
assets/spark.png


+ 97 - 57
cmake/SparkAppimageConfig.cmake

@@ -24,52 +24,76 @@
     # default.desktop 文件与 default.png 文件的生成。
     # 这是一个依赖的 copy-desktop-appimage 目标,并先行执行
 
-# if ()
-set(APPIMAGE_OUTPUT  "${CMAKE_BINARY_DIR}/appimage")
-set(APPIMAGE_ICON    "${APPIMAGE_OUTPUT}/default.png")
-set(APPIMAGE_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
-# set(LINUXDEPLOYQT)
-# set(APPIMAGETOOL)
-
-function(execute_linuxdeploy _PATH)
-    execute_process(COMMAND ${LINUXDEPLOYQT}
-        WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
-        )
-endfunction(execute_linuxdeploy _PATH)
-
-function(target_linuxdeploy)
-    add_custom_target(linuxdeploy pwd
-        BYPRODUCTS appimage
-        COMMAND cp ../${PROJECT_NAME} .
-        COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
-        COMMAND cp ../spark-appimage.desktop default.desktop
-        COMMAND cp ../spark-appimage.png     default.png
-        WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
-endfunction(target_linuxdeploy)
+# 要求:
+    # LINUXDEPLOYQT   提供的外部参数,一般指 linuxdeployqt 程序路径
+    # APPIMAGETOOL    提供的外部参数,一般指 appimagetool 程序路径
+
+option(USE_APPIMAGE_NEW_GLIBC "允许在打包过程中使用较新版本的 glibc 库" ON)
+
+set(APPIMAGE_OUTPUT         "${CMAKE_BINARY_DIR}/appimage")
+set(APPIMAGE_OUTPUT_ICON    "${APPIMAGE_OUTPUT}/default.png")
+set(APPIMAGE_OUTPUT_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
+
+# 1. 添加一个可以用于 Appimage 使用的图标文件
+function(add_appimage_icon _icon)
+    if(CMAKE_VERSION VERSION_LESS 3.21)
+        message("> cmake version is less than 3.21")
+        configure_file(${_icon} ${APPIMAGE_OUTPUT_ICON} COPYONLY)
+    else()
+        file(MAKE_DIRECTORY ${APPIMAGE_OUTPUT})
+        file(COPY_FILE ${_icon} ${APPIMAGE_OUTPUT_ICON})
+    endif(CMAKE_VERSION VERSION_LESS 3.21)
+endfunction(add_appimage_icon _icon)
+
+# 2. 基于 SparkDesktopMacros.cmake 提供的宏来定义 desktop 内容说明
+    # 使用与自身的 desktop.in 模板进行生成
+function(add_appimage_desktop)
+    configure_file(cmake/spark-appimage.desktop.in.txt
+        ${APPIMAGE_OUTPUT_DESTKOP} @ONLY)
+endfunction(add_appimage_desktop)
+
+function(target_linuxdeploy _target)
+
+    if(USE_APPIMAGE_NEW_GLIBC)
+        message("Use New glibc")
+        add_custom_target(linuxdeploy pwd
+            BYPRODUCTS appimage
+            COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip || true
+            WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+    else()
+        message("Un Use New glibc")
+        add_custom_target(linuxdeploy pwd
+            BYPRODUCTS appimage
+            COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -verbose=3 -no-strip || true
+            WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+    endif(USE_APPIMAGE_NEW_GLIBC)
+
+endfunction(target_linuxdeploy _target)
 
 function(target_appimage)
-    add_custom_target(copy-desktop-appimage
-        COMMAND cp ../spark-appimage.desktop default.desktop
-        COMMAND cp ../spark-appimage.png     default.png
-        WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
     add_custom_target(appimage pwd
         COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
-        WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
-        DEPENDS copy-desktop-appimage)
+        WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
 endfunction(target_appimage)
 
-function(add_appimage)
+# 3. 添加对目标的 Appimage 构建,Appimage 在一个项目中只能构建一个目标
+function(add_appimage_target _target)
     # check linuxdeploy
     if(NOT DEFINED LINUXDEPLOYQT)
         message("AppImage> Not Found LINUXDEPLOYQT Variable!")
         return()
     endif(NOT DEFINED LINUXDEPLOYQT)
-    if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+    if(CMAKE_VERSION VERSION_LESS 3.19)
         message("> cmake version is less than 3.19")
-        message(WARNING "!Relative paths are not supported!")
+        if(CMAKE_VERSION VERSION_GREATER 3.4)
+            get_filename_component(LINUXDEPLOYQT_REAL_PATH ${LINUXDEPLOYQT} REALPATH)
+        else()
+            message("> cmake version is less than 3.4")
+            message(WARNING "!Relative paths are not supported!")
+        endif(CMAKE_VERSION VERSION_GREATER 3.4)
     else()
         file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
-    endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+    endif(CMAKE_VERSION VERSION_LESS 3.19)
     message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
 
     # check appimagetool
@@ -77,47 +101,58 @@ function(add_appimage)
         message("AppImage> Not Found APPIMAGETOOL Variable!")
         return()
     endif(NOT DEFINED APPIMAGETOOL)
-    if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+    if(CMAKE_VERSION VERSION_LESS 3.19)
         # execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
         message("> cmake version is less than 3.19")
-        message(WARNING "!Relative paths are not supported!")
+        if(CMAKE_VERSION VERSION_GREATER 3.4)
+            get_filename_component(APPIMAGETOOL_REAL_PATH ${APPIMAGETOOL} REALPATH)
+        else()
+            message("> cmake version is less than 3.4")
+            message(WARNING "!Relative paths are not supported!")
+        endif(CMAKE_VERSION VERSION_GREATER 3.4)
     else()
         file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
-    endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
-    message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+    endif(CMAKE_VERSION VERSION_LESS 3.19)
+    message("AppImage> Found APPIMAGETOOL Variable: ${APPIMAGETOOL}")
 
     # do add_custome_target
     make_directory(${APPIMAGE_OUTPUT})
-    target_linuxdeploy()
+    target_linuxdeploy(${_target})
     target_appimage()
-endfunction(add_appimage)
 
-function(add_appimage_desktop)
-    configure_file(cmake/spark-appimage.desktop.in
-        ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY)
-endfunction(add_appimage_desktop)
+    # 重设目标输出的目录
+    set_target_properties(${_target}
+        PROPERTIES
+            RUNTIME_OUTPUT_DIRECTORY "${APPIMAGE_OUTPUT}")
 
-function(add_appimage_icon _ICON_PATH)
-    if(CMAKE_VERSION VERSION_LESS 3.21)
-        message("> cmake version is less than 3.21")
-        configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY)
-    else()
-        file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png)
-    endif(CMAKE_VERSION VERSION_LESS 3.21)
-endfunction(add_appimage_icon _ICON_PATH)
+    # 为解决在不使用 -unsupported-allow-new-glibc 参数时,
+    # 可能不会生成 AppRun 软链接的问题
+    if(NOT USE_APPIMAGE_NEW_GLIBC)
+        set_target_properties(${_target}
+            PROPERTIES
+                RUNTIME_OUTPUT_NAME "AppRun")
+    endif(NOT USE_APPIMAGE_NEW_GLIBC)    
 
+endfunction(add_appimage_target _target)
 
 
-# 如果glic>=2.27,你就需要加上参数 -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了)
-# 或 -unsupported-bundle-everything(大概的意思是尝试兼容,实际测试,到其他发行版直接用不了了,有可能是发行版的原因,还是建议用前者,虽然放弃了低版本)
+# 如果 glibc>=2.27, 你就需要加上参数 -unsupported-allow-new-glibc 意思就是不能再在低版本发行版上使用了
+# 或 -unsupported-bundle-everything
+# 大概的意思是尝试兼容,实际测试,到其他发行版直接用不了了,有可能是发行版的原因,还是建议用前者,虽然放弃了低版本
 
 # -unsupported-bundle-everything
-    # 捆绑所有依赖库,包括 ld-linux.so 加载器和 glibc。这将允许构建在较新系统上的应用程序在较旧的目标系统上运行,但不建议这样做,因为它会导致捆绑包超出所需的大小(并且可能到其他发行版无法使用)
+    # 捆绑所有依赖库,包括 ld-linux.so 加载器和 glibc。
+    # 这将允许构建在较新系统上的应用程序在较旧的目标系统上运行,
+        # 但不建议这样做,因为它会导致捆绑包超出所需的大小(并且可能到其他发行版无法使用)
 # -unsupported-allow-new-glibc
-    # 允许 linuxdeployqt 在比仍受支持的最旧 Ubuntu LTS 版本更新的发行版上运行。这将导致 AppImage无法在所有仍受支持的发行版上运行,既不推荐也不测试或支持
+    # 允许 linuxdeployqt 在比仍受支持的最旧 Ubuntu LTS 版本更新的发行版上运行。
+    # 这将导致 AppImage无法在所有仍受支持的发行版上运行,既不推荐也不测试或支持
 
-# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-allow-new-glibc
-# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-bundle-everything
+# 对 linuxdeployqt 的使用
+# ./linuxdeployqt-7-x86_64.AppImage 
+    # 程序目录/程序 -appimage -unsupported-allow-new-glibc
+# ./linuxdeployqt-7-x86_64.AppImage 
+    # 程序目录/程序 -appimage -unsupported-bundle-everything
 
 
 
@@ -126,7 +161,7 @@ endfunction(add_appimage_icon _ICON_PATH)
 # include(cmake/SparkAppimageConfig.cmake)  # 导入来自 Spark 构建的 Appimage 构建
 # add_appimage_icon(assets/spark.png)       # 添加到 Appimage 中的默认的图标
 # add_appimage_desktop()                    # 添加到 Appimage 中的默认desktop(使用来自 Spark 构建的 Desktop 构建中配置的信息(必须要求 spark-desktop))
-# add_appimage()                            # 应用对 Appimage 的构建
+# add_appimage_target(${PROJECT_NAME})      # 添加到 Appimage 中的默认目标,应用对 Appimage 的构建
 
 # 2. 在 Makefile 进行构建目标构建 Appimage 的构建流 --
 # 在 Makefile 进行构建目标构建 Appimage (要求提供工具的绝对路径,然后可依次进行linuxdeployqt, genrate-appimage)
@@ -150,3 +185,8 @@ endfunction(add_appimage_icon _ICON_PATH)
 # genrate-appimage:
 # 	cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
 # 	cd build && make appimage
+
+
+
+# NOTE:
+#  如果使用的库不存在于系统路径,则需要配置 export LD_LIBRARY_PATH=<路径> 以便 linuxdeployqt 可搜索到库的位置
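
Pulling the pieces above together, a minimal top-level usage sketch; the tool paths are placeholders and the desktop content is assumed to come from spark_desktop_macros as required above:

    include(cmake/SparkAppimageConfig.cmake)
    add_appimage_icon(assets/spark.png)
    add_appimage_desktop()
    add_appimage_target(${PROJECT_NAME})

    # cmake -B build -DLINUXDEPLOYQT=/path/to/linuxdeployqt-7-x86_64.AppImage \
    #                -DAPPIMAGETOOL=/path/to/appimagetool-x86_64.AppImage
    # cmake --build build
    # cmake --build build --target linuxdeploy
    # cmake --build build --target appimage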

+ 8 - 0
cmake/SparkBuildGraphviz.cmake

@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# 添加构建项目依赖图目标: make builddeps
+add_custom_target(builddeps
+    COMMAND "${CMAKE_COMMAND}" "--graphviz=graphviz/builddeps.dot" .
+    COMMAND dot -Tpng graphviz/builddeps.dot -o builddeps.png
+    WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+)
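
A quick usage sketch, assuming Graphviz's dot is installed and the module is included from the top-level CMakeLists.txt:

    include(cmake/SparkBuildGraphviz.cmake)
    # cmake -B build && cmake --build build --target builddeps
    # -> build/graphviz/builddeps.dot and build/builddeps.png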

+ 32 - 8
cmake/DebPackageConfig.cmake → cmake/SparkDebPackageConfig.cmake

@@ -1,7 +1,7 @@
 cmake_minimum_required(VERSION 3.0.0)
 
 # function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
-
+    
 # endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
 
 # if(add_deb_package    VALUE) set(Package    ${VALUE} PARENT_SCOPE) endif(add_deb_package    VALUE)
@@ -119,7 +119,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
         else()
             set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE)
         endif(_IN_VAL STREQUAL "auto")
-
+        
         message("--> 软件版本: ${_IN_VAL}")
     endif(_Version EQUAL "0")
 
@@ -137,7 +137,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
 
     find_str("${_IN_KEY}" "Architecture" _Architecture)
     if(_Architecture EQUAL "0")
-        set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
+        set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)    
         if(_IN_VAL STREQUAL "auto")
             execute_process(
                 COMMAND dpkg --print-architecture
@@ -148,7 +148,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
         endif(_IN_VAL STREQUAL "auto")
         message("--> 软件架构: ${_IN_VAL}")
     endif(_Architecture EQUAL "0")
-
+    
     find_str("${_IN_KEY}" "Priority" _Priority)
     if(_Priority EQUAL "0")
         set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE)
@@ -179,6 +179,12 @@ function(set_package_vars _IN_KEY _IN_VAL)
         message("--> 软件建议: ${_IN_VAL}")
     endif(_Recommends EQUAL "0")
 
+    find_str("${_IN_KEY}" "Conflicts" _Conflicts)
+    if(_Conflicts EQUAL "0")
+        set(CPACK_DEBIAN_PACKAGE_CONFLICTS "${_IN_VAL}" PARENT_SCOPE)
+        message("--> 软件冲突: ${_IN_VAL}")
+    endif(_Conflicts EQUAL "0")
+    
 endfunction(set_package_vars _IN_KEY _IN_VAL)
 
 # 定义一个自定义(add_package_descript)函数
@@ -194,7 +200,7 @@ function(add_package_descript IN_DES)
         message(FATAL_ERROR "!! Not Found Path: ${PACKAGE_DES_PATH}")
         return()
     endif(EXISTS ${IN_DES})
-
+    
     file(READ ${PACKAGE_DES_PATH} DES_CONTENT)
     trim_str("${DES_CONTENT}" DES_CONTENT)
 
@@ -244,7 +250,12 @@ function(add_package_descript IN_DES)
         set(PREV_DES_LINE "")
         while(NOT PREV_DES_LINE STREQUAL DES_LINE)
             if(NOT PREV_DES_LINE STREQUAL "")
-                set(Descrition "${Descrition}\n${DES_LINE}")
+                if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
+                    set(Descrition "${Descrition}\n${DES_LINE}")
+                else()
+                    string(STRIP "${DES_LINE}" STRIP_DES_LINE)
+                    set(Descrition "${Descrition}\n${STRIP_DES_LINE}")
+                endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
             endif(NOT PREV_DES_LINE STREQUAL "")
             set(PREV_DES_LINE "${DES_LINE}")
             sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
@@ -284,16 +295,28 @@ function(add_package_descript IN_DES)
     endif("${OSDVer}" STREQUAL "true")
     
 
+
     ##################### deb file name #####################
     set(_Package      "${CPACK_DEBIAN_PACKAGE_NAME}")
     set(_Version      "${CPACK_DEBIAN_PACKAGE_VERSION}")
     set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
 
-    set(_DebFileName
+    set(_DebFileName 
         "${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb"
     )
     set(CPACK_DEBIAN_FILE_NAME            ${_DebFileName})
 
+    # 标识: spark-deb-package
+    if(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
+        # eg: remove '_' of '_Debian' 
+        string(SUBSTRING "${PACKAGE_SUFFIX}" 1 -1 DISTRIBUTION)
+        if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
+            set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}\n .\n Build for ${DISTRIBUTION} through spark-deb-build.")
+        else()
+            set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition} "\n.\nBuild for ${DISTRIBUTION} through spark-deb-build.")
+        endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
+        
+    endif(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
 
     # set(CPACK_DEBIAN_PACKAGE_NAME         "${Package}")
     # set(CPACK_DEBIAN_PACKAGE_VERSION      "${Version}")
@@ -326,7 +349,7 @@ endfunction(add_package_descript IN_DES)
 # CPACK_DEBIAN_FILE_NAME                - n
 # CPACK_DEBIAN_PACKAGE_NAME             - y
 # CPACK_DEBIAN_PACKAGE_VERSION          - y
-# CPACK_DEBIAN_PACKAGE_ARCHITECTURE     - y(auto)
+# CPACK_DEBIAN_PACKAGE_ARCHITECTURE     - y(auto) -> dpkg --print-architecture
 # CPACK_DEBIAN_PACKAGE_DEPENDS          - y
 # CPACK_DEBIAN_PACKAGE_PRIORITY         - y
 # CPACK_DEBIAN_PACKAGE_MAINTAINER       - y
@@ -339,4 +362,5 @@ endfunction(add_package_descript IN_DES)
 #     set(ARCHITECTURE "arm64")
 # endif()
 
+
 # string(TIMESTAMP BUILD_TIME "%Y%m%d")
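
For orientation, a sketch of what the new Conflicts key and the file-name pattern produce; the package name follows the descript shown later in this commit, while the version and conflicting package are illustrative:

    # a "Conflicts: some-other-editor" line in the descript is equivalent to:
    set(CPACK_DEBIAN_PACKAGE_CONFLICTS "some-other-editor")
    # and the resulting artifact follows
    # ${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb,
    # e.g. notepad--_1.0.0_amd64_Debian.deb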

+ 29 - 15
cmake/SparkDesktopMacros.cmake

@@ -1,17 +1,19 @@
+# SparkDesktopMacros.cmake
 
-macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
-    set(APP_NAME               ${_APP_NAME})
-    set(APP_NAME_ZH_CN         ${_APP_NAME_ZH_CN})
-    set(APP_COMMENT            ${_APP_COMMENT})
-    set(APP_TYPE               ${_APP_TYPE})
-    set(APP_EXECUTE_PATH       ${_APP_EXECUTE_PATH})
-    set(APP_EXECUTE_ICON_PATH  ${_APP_EXECUTE_ICON_PATH})
-    set(APP_CATEGORIES         ${_APP_CATEGORIES})
-    configure_file(cmake/spark-desktop.desktop.in
-        ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop
+macro(spark_desktop_macros)
+    set(APP_NAME              ${ARGV0})
+    set(APP_NAME_ZH_CN        ${ARGV1})
+    set(APP_COMMENT           ${ARGV2})
+    set(APP_TYPE              ${ARGV3})
+    set(APP_EXECUTE_PATH      ${ARGV4})
+    set(APP_EXECUTE_ICON_PATH ${ARGV5})
+    set(APP_CATEGORIES        ${ARGV6})
+    set(APP_MIME_TYPE         ${ARGV7})
+    configure_file(cmake/spark-desktop.desktop.in.txt
+        ${CMAKE_BINARY_DIR}/${ARGV0}.desktop
     )
-    set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop)
-endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+    set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${ARGV0}.desktop)
+endmacro(spark_desktop_macros)
 
 # include(cmake/SparkDesktopMacros.cmake)
 # 内容默认应用名称: Name= 应与项目名称相同
@@ -22,15 +24,27 @@ endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _
     # 应用类型: Type=
     # 执行程序: Exec=
     # 图标路径: Icon=
-    # 应用分类: Category=
+    # 应用分类: Categories=
+    # MIME类型: MimeType=
 # )
 
+# TODO 安装位置:INSTALL(将自动实现 install 文件,如 /usr/share/applications)
+
+# install(FILES ${APP_NAME}.desktop
+#     DESTINATION /usr/share/applications
+# )
+    # 或者
+# install(FILES ${SPARK_DESKTOP_FILE}
+#     DESTINATION /usr/share/applications
+# )
+
+# 基于 configure_file 填充内容配置
 # configure_file(<input> <output>
 #                [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS |
 #                 FILE_PERMISSIONS <permissions>...]
 #                [COPYONLY] [ESCAPE_QUOTES] [@ONLY]
 #                [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ])
 
-# install(FILES ${APP_NAME}.desktop
+# install(FILES ${SPARK_DESKTOP_FILE}
 #     DESTINATION /usr/share/applications
-# )
+# )
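
A call sketch matching the argument order listed above (all values are illustrative):

    spark_desktop_macros(
        "spark-demo"                        # Name=
        "Spark 演示"                         # Name[zh_CN]=
        "A spark-build demo application"    # Comment=
        "Application"                       # Type=
        "/usr/bin/spark-demo"               # Exec=
        "/usr/share/icons/spark-demo.png"   # Icon=
        "Utility"                           # Categories=
        "text/plain"                        # MimeType=
    )
    # SPARK_DESKTOP_FILE -> ${CMAKE_BINARY_DIR}/spark-demo.desktop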

+ 8 - 2
cmake/SparkEnvConfig.cmake

@@ -1,16 +1,22 @@
 cmake_minimum_required(VERSION 3.5.1)
 
 set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
-# set(CMAKE_INCLUDE_CURRENT_DIR ON)
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
 set(CMAKE_AUTOMOC ON)
 set(CMAKE_AUTOUIC ON)
 set(CMAKE_AUTORCC ON)
 # set(CMAKE_BUILD_TYPE "Debug")
 
 option(SPARK_DEBUG_MESSAGE "CMake Spark Module Debug Message." OFF)
+set(SPAKK_DEBUG_LOGFILE "${CMAKE_BINARY_DIR}/spark_debug.log" CACHE STRING "Spark Build Debug logfile." FORCE)
+file(WRITE ${SPAKK_DEBUG_LOGFILE})
 
 macro(spark_debug_message)
     if(SPARK_DEBUG_MESSAGE)
-        message(${ARGN})
+        set(SPARK_ONECE_LOG ${ARGN})
+        message("[SPARK_MESSAGE]: " ${SPARK_ONECE_LOG})
+        file(APPEND ${SPAKK_DEBUG_LOGFILE} ${SPARK_ONECE_LOG} "\n")
+        unset(SPARK_ONECE_LOG)
     endif(SPARK_DEBUG_MESSAGE)
 endmacro(spark_debug_message)
+
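
A usage sketch, assuming the project is configured with -DSPARK_DEBUG_MESSAGE=ON:

    spark_debug_message("checking: " ${CMAKE_CURRENT_SOURCE_DIR})
    # prints "[SPARK_MESSAGE]: checking: ..." and appends the same text
    # to ${CMAKE_BINARY_DIR}/spark_debug.log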

+ 11 - 0
cmake/SparkFindDtkConfig.cmake

@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# include(SparkFindQt5Config.cmake)
+find_package(Dtk COMPONENTS Core Widget Gui)
+
+function(target_link_dtk NAME)
+    target_link_libraries(${NAME} 
+        ${DtkCore_LIBRARIES}
+        ${DtkWidget_LIBRARIES}
+        ${DtkGui_LIBRARIES})
+endfunction(target_link_dtk NAME)
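
A usage sketch, assuming a DTK-based executable target defined elsewhere (the target name and source file are illustrative):

    include(cmake/SparkFindDtkConfig.cmake)
    add_executable(dtk-demo main.cpp)
    target_link_dtk(dtk-demo)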

+ 7 - 0
cmake/SparkFindLibraries.cmake

@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# spark_find_library(notify libnotify)
+
+# function(target_link_${_prefix} TARGET)
+#     target_link_libraries(${TARGET} ${_prefix})
+# endfunction(target_link_${_prefix} TARGET)

+ 242 - 43
cmake/SparkMacrosConfig.cmake

@@ -2,6 +2,48 @@ cmake_minimum_required(VERSION 3.5.1)
 
 # 定义一些 macro 用于自动生成构建结构
 
+# spark_aux_source_directory outvar invar [skip]
+# 获取目录下的所有源代码
+macro(spark_aux_source_directory OUTVAR INVAR)
+    # iv: internal_variable
+    set(iv_args ${ARGN})
+    list(LENGTH iv_args iv_arglen)
+    
+    file(GLOB iv_SOURCE_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.c ${INVAR}/*.cpp)
+    file(GLOB iv_HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.h ${INVAR}/*.hpp)
+    file(GLOB iv_QT_UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.ui ${INVAR}/*.qrc)
+
+    if(iv_arglen EQUAL 1)
+        list(APPEND ${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
+    else()
+        set(${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
+    endif(iv_arglen EQUAL 1)
+
+    unset(iv_args)
+    unset(iv_arglen)
+    unset(iv_SOURCE_LIST)
+    unset(iv_HEADER_LIST)
+    unset(iv_QT_UI_LIST)
+
+endmacro(spark_aux_source_directory OUTVAR INVAR)
+
+# spark_aux_source_directories outvar invar [...]
+# 获取目录列表下的所有源代码
+    # spark_aux_source_directory 的扩展,支持多个 invar 与追加参数
+macro(spark_aux_source_directories OUTVAR INVAR)
+    set(iv_aux_directories ${ARGN})
+    
+    spark_aux_source_directory(${OUTVAR} ${INVAR})
+
+    foreach(iv_directory IN LISTS iv_aux_directories)
+        spark_aux_source_directory(${OUTVAR} ${iv_directory} SKIP)
+    endforeach(iv_directory IN LISTS iv_aux_directories)
+
+    unset(iv_aux_directories)
+
+endmacro(spark_aux_source_directories OUTVAR INVAR)
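# Usage sketch (target and directory names are illustrative):
#   spark_aux_source_directories(DEMO_SOURCES src src/widgets)
#   spark_add_executable(demo-app ${DEMO_SOURCES})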
+
+
 # spark_add_library <lib_name> [files]...
 # 构建一个库,基于指定的源文件
     # 并根据库名生成 target_link_<lib_name> 函数
@@ -27,44 +69,124 @@ endmacro(spark_add_library _lib_name)
         # 函数内增加以 <lib_path> 头文件搜索路径
 macro(spark_add_library_path _lib_name _lib_path)
 
-    set(${_lib_name}_SOURCE_PATH ${_lib_path})
+    # 0. 建立初始变量体系
     set(${_lib_name}_TYPE)
-    if(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
-        set(${_lib_name}_SOURCE_PATH ${ARGV2})
+    set(${_lib_name}_TYPE_MESSAGE "STATIC(Default)")
+    set(${_lib_name}_ARGN ${ARGN})
+
+    # 1. 判断 _lib_path 是否是 SHARED 或 STATIC
+    if(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
         set(${_lib_name}_TYPE ${_lib_path})
-        spark_debug_message("_lib_path: ${${_lib_name}_SOURCE_PATH}(${ARGV2})[${${_lib_name}_TYPE}]")
+        set(${_lib_name}_TYPE_MESSAGE ${${_lib_name}_TYPE})
 
         if(${ARGC} LESS 3)
             message(FATAL_ERROR "Missing parameter, library path not specified.")
         endif(${ARGC} LESS 3)
-    endif(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
+    else()
+        # 如没有则将 _lib_path 加入到 ARGN 
+        list(APPEND ${_lib_name}_ARGN ${_lib_path})
+    endif(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
 
-    aux_source_directory(${${_lib_name}_SOURCE_PATH} ${_lib_name}_SOURCES)
+    # 1. 处理由 spark_add_library_realpaths 构建转本构建时的清洗机制
+    spark_debug_message("> Building: ${_lib_name}, type: ${${_lib_name}_TYPE_MESSAGE}")
+    set(${_lib_name}_ARGN_REF ${${_lib_name}_ARGN})
+    unset(${_lib_name}_ARGN)
+    foreach(_old IN LISTS ${_lib_name}_ARGN_REF)
+        set(_new ${_old})
+        string(FIND "${_old}" "+" _plus_index)
+        if(${_plus_index} GREATER 0)
+            string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
+            spark_debug_message("  [CONVERT] ${_new} <- ${_old}")
+        endif(${_plus_index} GREATER 0)
+        list(APPEND ${_lib_name}_ARGN ${_new})
+    endforeach(_old IN LISTS ${_lib_name}_ARGN_REF)
+    
 
-    # message("================ spark_add_library_path: ${_lib_name} ================")
-    file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.ui)
-    add_library(${_lib_name} ${${_lib_name}_TYPE} ${${_lib_name}_SOURCES} ${UI_LIST})
-    spark_debug_message("${_lib_name}_SOURCES: ${${_lib_name}_SOURCES}, ${${_lib_name}_SOURCE_PATH}")
-    foreach(item IN LISTS ${_lib_name}_SOURCES)
-        spark_debug_message(" -> ${item}")
-    endforeach(item IN LISTS ${_lib_name}_SOURCES)
+    # 2.目标参数项分析出子项
+    # 拆分出源代码、路径、未知项等
+    set(${_lib_name}_ARGN_SOURCES)
+    set(${_lib_name}_ARGN_APPEND_PATHS)
+    set(${_lib_name}_ARGN_UNKNOW)
+    foreach(item IN LISTS ${_lib_name}_ARGN)
+        spark_debug_message("  [ARGN] check:" ${item})
+        if(NOT EXISTS ${item})
+            set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
+        endif()
+        if(EXISTS ${item})
+            # spark_debug_message("       exists: true")
+            file(REAL_PATH ${item} ${_lib_name}_ARGN_item)
+            if(IS_DIRECTORY  ${${_lib_name}_ARGN_item})
+                list(APPEND ${_lib_name}_ARGN_APPEND_PATHS ${item})
+            else()
+                list(APPEND ${_lib_name}_ARGN_SOURCES ${item})
+            endif(IS_DIRECTORY  ${${_lib_name}_ARGN_item})
+        else()
+            list(APPEND ${_lib_name}_ARGN_UNKNOW ${item})
+            spark_debug_message("       exists: false")
+        endif()
+    endforeach()
+
+    list(LENGTH ${_lib_name}_ARGN_SOURCES      ${_lib_name}_ARGN_SOURCES_LENGTH)
+    list(LENGTH ${_lib_name}_ARGN_APPEND_PATHS ${_lib_name}_ARGN_APPEND_PATHS_LENGTH)
+    list(LENGTH ${_lib_name}_ARGN_UNKNOW       ${_lib_name}_ARGN_UNKNOW_LENGTH)
+    spark_debug_message("       result: files(${${_lib_name}_ARGN_SOURCES_LENGTH}), paths(${${_lib_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_lib_name}_ARGN_UNKNOW_LENGTH})" ${item})
 
+    # 3. 获取所有源代码为 any_files
+    spark_debug_message("  files:")
+    set(any_files ${${_lib_name}_ARGN_SOURCES})
+    foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+        spark_aux_source_directory(item_files ${item})
+        list(APPEND any_files ${item_files})
+        foreach(item_file IN LISTS item_files)
+            spark_debug_message("       ${item_file}")
+        endforeach(item_file IN LISTS item_files)
+    endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+
+    # 4. 构建目标库
+    add_library(${_lib_name} ${${_lib_name}_TYPE} 
+        ${${_lib_name}_ARGN_SOURCES}
+        ${any_files})
+
+    # 5. 建立引用点 
+        # target_link_<_lib_name> 函数
+        # target_include_<_lib_name> 函数
+        
+        # target_<_lib_name>_include 函数
+        # target_<_lib_name>_link 函数
+
+    function(target_${_lib_name}_include _include)
+        spark_debug_message("添加引用: ${_lib_name} <- ${_include} ${${_lib_name}_INCLUDE_ARGN}")
+        target_include_directories(${_lib_name} PRIVATE ${_include})
+    endfunction(target_${_lib_name}_include _include)
+
+    function(target_${_lib_name}_link _library)
+        spark_debug_message("添加链接: ${_lib_name} <- ${_library} ${${_lib_name}_LINK_ARGN}")
+        target_link_libraries(${_lib_name} ${_library})
+    endfunction(target_${_lib_name}_link _library)
+    
     function(target_link_${_lib_name} TARGET)
-        # spark_debug_message("target_link_${_lib_name}")
-        spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
-        target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
+        spark_debug_message("链接引用: ${TARGET} <- ${_lib_name}")
+        target_include_directories(${TARGET} PRIVATE 
+            "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
         target_link_libraries(${TARGET} ${_lib_name})
     endfunction(target_link_${_lib_name} TARGET)
 
     function(target_include_${_lib_name} TARGET)
-        # spark_debug_message("target_link_${_lib_name}")
-        spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
-        target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
-        # target_link_libraries(${TARGET} ${_lib_name})
+        spark_debug_message("引入引用: ${TARGET} <- ${_lib_name}")
+        target_include_directories(${TARGET} PUBLIC 
+            "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
     endfunction(target_include_${_lib_name} TARGET)
 
-    # file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.h)
-    target_include_directories(${_lib_name} PUBLIC "${${_lib_name}_SOURCE_PATH}")
+    
+    target_include_directories(${_lib_name} PRIVATE 
+        "${${_lib_name}_ARGN_APPEND_PATHS}")
+
+    # 输出 includes
+    spark_debug_message("  ${_lib_name}_ARGN_APPEND_PATHS: ")
+    foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+        string(REPLACE "${CMAKE_SOURCE_DIR}/" "" item_var "${item}")
+        spark_debug_message("        ${item_var}")
+    endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
 
     # 如果想用以下操作手动实现 target_link_include_directories
         # 请注意对 LIST 类型使用 "" 进行包围
@@ -88,29 +210,105 @@ endmacro(spark_add_library_path _lib_name _lib_path)
     # Qt编译时源文件包括很多类型,需要指定 *.h/*.cpp/*.qrc/*.qm/... 等
 macro(spark_add_executable _exec_name)
 
-    spark_debug_message("================ ${_exec_name} Executable ================")
+    set(${_exec_name}_TYPE_MESSAGE "可执行程序")
+    spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+    
     add_executable(${_exec_name} ${ARGN})
 
 endmacro(spark_add_executable _exec_name)
 
+# spark_add_executable_path <target> <path> [files ... paths]
+# 构建一个可执行程序,基于指定的路径
 macro(spark_add_executable_path _exec_name _exec_path)
-    aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
+    spark_add_executable(${_exec_name})
 
-    spark_debug_message("================ ${_exec_name} Executable ================")
-    file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
-    add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
-    foreach(item IN LISTS ${_exec_name}_SOURCES)
-        spark_debug_message(" -> ${item}")
-    endforeach(item IN LISTS ${_exec_name}_SOURCES)
+    # 0. 建立初始变量体系
+    # set(${_exec_name}_TYPE)
+    # set(${_exec_name}_TYPE_MESSAGE "可执行程序")
+    set(${_exec_name}_ARGN ${ARGN})
+
+    # 1. 处理由 spark_add_executable_realpaths 构建转本构建时的清洗机制
+    # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+    set(${_exec_name}_ARGN_REF ${${_exec_name}_ARGN})
+    unset(${_exec_name}_ARGN)
+    foreach(_old IN LISTS ${_exec_name}_ARGN_REF)
+        set(_new ${_old})
+        string(FIND "${_old}" "+" _plus_index)
+        if(${_plus_index} GREATER 0)
+            string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
+            spark_debug_message("  [CONVERT] ${_new} <- ${_old}")
+        endif(${_plus_index} GREATER 0)
+        list(APPEND ${_exec_name}_ARGN ${_new})
+    endforeach(_old IN LISTS ${_exec_name}_ARGN_REF)
+
+    # 1.目标参数项分析出子项
+    # 拆分出源代码、路径、未知项等
+    # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+    set(${_exec_name}_ARGN_SOURCES)
+    set(${_exec_name}_ARGN_APPEND_PATHS ${_exec_path})
+    set(${_exec_name}_ARGN_UNKNOW)
+    foreach(item IN LISTS ${_exec_name}_ARGN)
+        spark_debug_message("  [ARGN] check:" ${item})
+        if(NOT EXISTS ${item})
+            set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
+        endif()
+        if(EXISTS ${item})
+            # spark_debug_message("       exists: true")
+            file(REAL_PATH ${item} ${_exec_name}_ARGN_item)
+            if(IS_DIRECTORY  ${${_exec_name}_ARGN_item})
+                list(APPEND ${_exec_name}_ARGN_APPEND_PATHS ${item})
+            else()
+                list(APPEND ${_exec_name}_ARGN_SOURCES ${item})
+            endif(IS_DIRECTORY  ${${_exec_name}_ARGN_item})
+        else()
+            list(APPEND ${_exec_name}_ARGN_UNKNOW ${item})
+            spark_debug_message("       exists: false")
+        endif()
+    endforeach()
+
+    list(LENGTH ${_exec_name}_ARGN_SOURCES      ${_exec_name}_ARGN_SOURCES_LENGTH)
+    list(LENGTH ${_exec_name}_ARGN_APPEND_PATHS ${_exec_name}_ARGN_APPEND_PATHS_LENGTH)
+    list(LENGTH ${_exec_name}_ARGN_UNKNOW       ${_exec_name}_ARGN_UNKNOW_LENGTH)
+    spark_debug_message("       result: files(${${_exec_name}_ARGN_SOURCES_LENGTH}), paths(${${_exec_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_exec_name}_ARGN_UNKNOW_LENGTH})" ${item})
+
+
+    # 2. 获取所有源代码为 any_files
+    spark_debug_message("  files:")
+    set(any_files ${${_exec_name}_ARGN_SOURCES})
+    foreach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
+        spark_aux_source_directory(item_files ${item})
+        list(APPEND any_files ${item_files})
+        foreach(item_file IN LISTS item_files)
+            spark_debug_message("       ${item_file}")
+        endforeach(item_file IN LISTS item_files)
+    endforeach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
+ 
+    # 3. 构建可执行目标所需要的文件
+    # add_executable(${_exec_name} 
+    #     ${${_exec_name}_ARGN_SOURCES}
+    #     ${any_files})
+
+    target_sources(${_exec_name} PRIVATE
+        ${${_exec_name}_ARGN_SOURCES}
+        ${any_files})
 
-    # function(target_link_${_exec_name} TARGET)
-    #     spark_debug_message("target_link_${_lib_name}")
-    spark_debug_message(" -> (include): ${_exec_path}")
-    target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
-        # target_link_libraries(${TARGET} ${_lib_name})
-    # endfunction(target_link_${_exec_name} TARGET)
-    # target_link_${_exec_name}(${_exec_name})
+     # 4. 建立引用点 
+        # target_<_exec_name>_include 函数
+        # target_<_exec_name>_link 函数
+    function(target_${_exec_name}_include _include)
+        spark_debug_message("添加引用: ${_exec_name} <- ${_include} ${${_exec_name}_INCLUDE_ARGN}")
+        target_include_directories(${_exec_name} PRIVATE ${_include})
+    endfunction(target_${_exec_name}_include _include)
 
+    function(target_${_exec_name}_link _library)
+        spark_debug_message("添加链接: ${_exec_name} <- ${_library} ${${_exec_name}_LINK_ARGN}")
+        target_link_libraries(${_exec_name} ${_library})
+    endfunction(target_${_exec_name}_link _library)
+    
+    target_include_directories(${_exec_name} PRIVATE 
+        ${_exec_path})
+    spark_debug_message("  include: ${_exec_path}\n")
+    
 endmacro(spark_add_executable_path _exec_name _exec_path)
 
 # spark_find_library
@@ -152,10 +350,11 @@ endmacro(spark_add_executable_paths _prefix_path)
     # ARGN: 此宏剩余的参数列表
         # 在使用 target_link_<name> 时
         # _NAME: 用于此 fucntion 中的要求参数: <_NAME>目标将要连接此库
-macro(spark_add_link _IN_NAME)
-    function(target_link_${_IN_NAME} _NAME)
-        spark_debug_message("LINK ${_NAME} ${ARGN}")
-        target_link_libraries(${_NAME}
+macro(spark_add_link _name)
+    function(target_link_${_name} _link)
+        spark_debug_message("> Linking: ${_link}")
+        spark_debug_message("        <- ${ARGN}\n")
+        target_link_libraries(${_link}
             ${ARGN})
-    endfunction(target_link_${_IN_NAME} _NAME)
-endmacro(spark_add_link _IN_NAME)
+    endfunction(target_link_${_name} _link)
+endmacro(spark_add_link _name)
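
A combined usage sketch for the macros in this module; the target, library, and path names are illustrative, and target_link_bigimage / target_link_extras are the helper functions generated by spark_add_library_path and spark_add_link respectively:

    spark_add_library_path(bigimage SHARED src/bigimage)
    spark_add_executable_path(demo-app src/app src/app/resources.qrc)
    target_link_bigimage(demo-app)
    spark_add_link(extras pthread)    # defines target_link_extras(<target>)
    target_link_extras(demo-app)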

+ 248 - 150
cmake/SparkMacrosExtendConfig.cmake

@@ -4,171 +4,164 @@
 function(find_plus INVAL OUTVAL)
     string(FIND "${INVAL}" "+" plus_index)
     set(${OUTVAL} ${plus_index} PARENT_SCOPE)
-    # if(plus_index LESS 0)
-    #     set(${OUTVAL} -1 PARENT_SCOPE)
-    # else()
-    #     set(${OUTVAL} ${plus_index} PARENT_SCOPE)
-    # endif(plus_index LESS 0)
 endfunction(find_plus INVAL OUTVAL)
 
-# find_plus("FF" FFFF)
-# message("--> FFFF ${FFFF}")  # --> FFFF -1
-# find_plus("F+F" FFFF)
-# message("--> FFFF ${FFFF}")  # --> FFFF 1
-# find_plus("+F+F" FFFF)
-# message("--> FFFF ${FFFF}")  # --> FFFF 0
-
-# set(FFF)
-# list(APPEND FFFF )
-# list(APPEND FFFF "F")
-# list(APPEND FFFF "FA")
-# message("--> FFFF: ${FFFF}")  # --> FFFF: F;FA
-
-# set(FFFFS "")
-# list(APPEND FFFFS ${FFFF})
-# message("--> FFFFS: ${FFFFS}")  # --> FFFFS: F;FA
-
-# set(FFFF "+AA+BB+CC+DD")
-# string(REPLACE "+" ";" FFFFL "${FFFF}")
-# list(LENGTH FFFFL FFFFLEN)
-# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F;
-
-# plus_list
-# 将传入的 "+AAA+BBB+CCC" 类型数据变成一个 列表(list)
-# 适用于不使用 string 进行替换 + 为 ";" 的情况下使用直接变成 list
-function(plus_list INVAL OUTVAL OUTVALLEN)
-    # set(${OUTVAL} "..." PARENT_SCOPE)
-    # set(${OUTVALLEN} 0 PARENT_SCOPE)
-
-    set(_tmps "")       # 设置为空的
-
-    # 寻找下一个 + 位置
-    find_plus(${INVAL} RIGHT_PLUS)
-
-    string(LENGTH "${INVAL}" INVALLEN)
-    spark_debug_message("--> 传入的 INVAL: --> 内容: ${INVAL}")
-    spark_debug_message("--> 传入的 INVAL: --> 长度: ${INVALLEN}")
-    spark_debug_message("--> 传入的 INVAL: --> +位置: ${RIGHT_PLUS}")
-
-    # 判断是否有右侧 + 号
-    if(RIGHT_PLUS LESS 0)
-        spark_debug_message("--> 传入的 INVAL: --> 无需计算新的+位置")
-        # spark_debug_message("--> 计算新的 + 位置: ${_PLUSINDEX}")
-        list(APPEND _tmps ${INVAL})
-    else()
-        math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1")
-        spark_debug_message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX}")
-
-        string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal)
-        spark_debug_message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}")
-        # string(REPLACE "+" ";" _tmps "${NewVal}")
-        # list(LENGTH FFFFL FFFFLEN)
-
-        # spark_debug_message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}")
-
-        # find_plus(${NewVal} _NextPlus)
-        # if(_NextPlus LESS 0)
-            # list(APPEND _tmps ${NewVal})
-            # spark_debug_message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
-        # else()
-        #     spark_debug_message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
-        #     # 重新
-        #     # plus_list(${NewVal} NewValS )
-        #     # foreach(item)
-        #         # list(APPEND _tmps ${item})
-        #     # endforeach(item)
-        # endif(_NextPlus LESS 0)
-    endif(RIGHT_PLUS LESS 0)
-
-    set(${OUTVAL} ${_tmps} PARENT_SCOPE)
-    list(LENGTH _tmps _tmps_len)
-    set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE)
-
-endfunction(plus_list INVAL OUTVAL OUTVALLEN)
-
-# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN)
-# spark_debug_message("--------> ${FFF}: -> ${FFLEN}")
+function(find_plus_v INVAL OUTVAL)
+    string(FIND "${${INVAL}}" "+" plus_index)
+    set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+endfunction(find_plus_v INVAL OUTVAL)
+
+function(find_colon INVAL OUTVAL)
+    string(FIND "${INVAL}" ":" colon_index)
+    set(${OUTVAL} ${colon_index} PARENT_SCOPE)
+endfunction(find_colon INVAL OUTVAL)
+
+function(find_colon_v INVAL OUTVAL)
+    string(FIND "${${INVAL}}" ":" colon_index)
+    set(${OUTVAL} ${colon_index} PARENT_SCOPE)
+endfunction(find_colon_v INVAL OUTVAL)
+
+function(find_dir INVAL OUTVAL)
+    string(FIND "${INVAL}" "/" _STR ${ARGN})
+    set(${OUTVAL} ${_STR} PARENT_SCOPE)
+endfunction(find_dir INVAL OUTVAL)
+
+function(find_dir_v INVAL OUTVAL)
+    string(FIND "${${INVAL}}" "/" _STR ${ARGN})
+    set(${OUTVAL} ${_STR} PARENT_SCOPE)
+endfunction(find_dir_v INVAL OUTVAL)
+
+#
+function(str_left INVAL INDEX OUTVAL)
+    set(LEFT_INDEX ${INDEX})    
+    string(SUBSTRING "${INVAL}" 0 ${LEFT_INDEX} _LEFT_V)
+    set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
+endfunction(str_left INVAL INDEX OUTVAL)
+
+function(str_right INVAL INDEX OUTVAL)
+    math(EXPR RIGHT_INDEX ${INDEX}+1)
+    string(SUBSTRING "${INVAL}" ${RIGHT_INDEX} -1 _RIGHT_V)
+    set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
+endfunction(str_right INVAL INDEX OUTVAL)
+
+function(str_left_v INVAL INDEX OUTVAL)
+    set(LEFT_INDEX ${${INDEX}})    
+    string(SUBSTRING "${${INVAL}}" 0 ${LEFT_INDEX} _LEFT_V)
+    set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
+endfunction(str_left_v INVAL INDEX OUTVAL)
+
+function(str_right_v INVAL INDEX OUTVAL)
+    math(EXPR RIGHT_INDEX ${${INDEX}}+1)
+    string(SUBSTRING "${${INVAL}}" ${RIGHT_INDEX} -1 _RIGHT_V)
+    set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
+endfunction(str_right_v INVAL INDEX OUTVAL)
+
+#
+function(find_colon_plus INVAL OUTVAL)
+    find_colon(${INVAL} COLON_INDEX)
+    str_right(${INVAL} ${COLON_INDEX} COLON_RIGHT)
+    find_plus_v(COLON_RIGHT PLUS_INDEX)
+    str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
+
+    set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_colon_plus INVAL OUTVAL)
+
+function(find_colon_plus_v INVAL OUTVAL)
+    find_colon_v(${INVAL} COLON_INDEX)
+    str_right_v(${INVAL} COLON_INDEX COLON_RIGHT)
+    find_plus_v(COLON_RIGHT PLUS_INDEX)
+    str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
+
+    set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_colon_plus_v INVAL OUTVAL)
+
+function(find_dir_plus INVAL OUTVAL)
+    # t:*/*+d 
+    #    ^
+    find_dir("${INVAL}" SLASH_INDEX REVERSE)
+    str_right("${INVAL}" ${SLASH_INDEX} SLASH_RIGHT)
+    find_plus_v(SLASH_RIGHT PLUS_INDEX)
+    str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
+
+    set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_dir_plus INVAL OUTVAL)
+
+function(find_dir_plus_v INVAL OUTVAL)
+    # t:*/*+d 
+    #    ^
+    find_dir("${${INVAL}}" SLASH_INDEX REVERSE)
+    str_right("${${INVAL}}" ${SLASH_INDEX} SLASH_RIGHT)
+    find_plus_v(SLASH_RIGHT PLUS_INDEX)
+    str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
+
+    set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_dir_plus_v INVAL OUTVAL)
+
+
+# spark_add_library_source <target> ...
+# 扩展 一行一可执行目标 的构建的扩展宏
+# 在构建时将会另外加入这些资源
+macro(spark_add_library_source target)
+    set(${target}_ADD_SOURCE ${ARGN})
+endmacro(spark_add_library_source target)
+
+# 冗余的 target_link_qt5 或 qt6 的处理逻辑
+macro(_handle_spark_target_link_qt_macro _target)
+    if(SPARK_FIND_QT5)
+        target_link_qt5(${_target})
+    endif(SPARK_FIND_QT5)
+
+    if(SPARK_FIND_QT6)
+        target_link_qt6(${_target})
+    endif(SPARK_FIND_QT6)
+endmacro(_handle_spark_target_link_qt_macro _target)
 
 # spark_add_library_realpaths
 # 基于传入的项进行构建
 # 可接受的值为: 路径列表
 # 可接受的值为: 路径列表+依赖库A+依赖库B
 macro(spark_add_library_realpaths)
-    spark_debug_message("---> 基于传入的项进行构建 <---")
-    # spark_debug_message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate")
-    # string(FIND <string> <substring> <output_variable> [REVERSE])
-    # string(SUBSTRING <string> <begin> <length> <output_variable>)
-    # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL)      # value is set to "1000"
 
     set(REALPATHS ${ARGN})
     foreach(REALPATH IN LISTS REALPATHS)
-        spark_debug_message("---> 传入路径: ${REALPATH} <--- ")
-        string(LENGTH "${REALPATH}" REALPATH_LENGTH)
-        spark_debug_message("---> 计算传入路径长度: --> 长度: ${REALPATH_LENGTH}")
-
-        string(FIND "${REALPATH}" "/" LASTINDEX REVERSE)
-        spark_debug_message("---> 计算传入路径末尾/位置: --> 长度: ${LASTINDEX}")
-        math(EXPR LASTINDEX "${LASTINDEX}+1")
-        spark_debug_message("---> 计算传入路径末尾/右移: --> 长度: ${LASTINDEX}")
-        string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency)
 
+        # # 找 : 号下标,这是找:号的函数
+        # find_colon(${REALPATH} COLON_INDEX)
+        # 找 / 号下标,这是找/号的函数
+        find_dir_v(REALPATH SLASH_INDEX REVERSE)
         # 找 + 号下标,这是找+号的函数
-        find_plus(${REALPATH} RIGHT_PLUS)
-
-        # 判断是否有找到 + 号下标,值为 -1 或 正整数
-        if(RIGHT_PLUS LESS 0) # 小于0: 不存在 + 号
-            set(REALNAME "${REALNAME_Dependency}")
-            spark_debug_message("---> 传入路径末尾/右移部分: --> ${REALNAME} <-- 无依赖+")
-
-            spark_debug_message("---> 构建 ${REALNAME} -> ${REALNAME} ${REALPATH} ")
-
-            spark_add_library_path(${REALNAME} ${REALPATH})
-
-            if(SPARK_FIND_QT5)
-                target_link_qt5(${REALNAME})
-            endif(SPARK_FIND_QT5)
-
-            if(SPARK_FIND_QT6)
-                target_link_qt6(${REALNAME})
-            endif(SPARK_FIND_QT6)
-
+        find_plus_v(REALPATH PLUS_INDEX)
+
+        # +
+        if(PLUS_INDEX LESS 0)
+            # 完全没有 + 的情况下,它就是一个基于目录的构建
+            set(dir ${REALPATH})
+            str_right_v(REALPATH SLASH_INDEX target)
+
+            spark_add_library_path(${target} 
+                ${dir}
+                ${${target}_ADD_SOURCE}
+            )
+            # 使用 spark_add_library_realpaths 构建的依赖将允许直接引用库头文件
+            target_include_directories(${target} PUBLIC ${dir})
+            _handle_spark_target_link_qt_macro(${target})
         else()
-            spark_debug_message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+")
-
-            # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称
-            # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy
-            #                             ^(LASTINDEX)    ^(RIGHT_PLUS)
-            # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度
-            math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}")
-
-            spark_debug_message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}")
-            # spark_debug_message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}")
-
-            # 目标名称为 DocTypeListView
-            # 依赖为    JsonDeploy
-            # set(REALNAME "")
-            string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR)
-            string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME)
-
-            spark_debug_message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}")
-
-            string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency)
-            spark_debug_message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency}")
-
-            # plus_list(${Dependency} dependencies dependencies_len)
-            string(REPLACE "+" ";" dependencies "${Dependency}")
-            spark_debug_message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency} --> 列表: ${dependencies} <-- ")
-
-
-            spark_debug_message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}")
+            # 有 + 的情况下,获取 + 号下标右侧所有内容为 target_depends_str 并转为列表
+            str_right_v(REALPATH PLUS_INDEX target_depends_str)
+            string(REPLACE "+" ";" target_depends "${target_depends_str}")
+            
+            find_dir_plus_v(REALPATH target)
+            str_left_v(REALPATH PLUS_INDEX dir)
+
+            spark_add_library_path(${target} 
+                ${dir}
+                ${${target}_ADD_SOURCE}
+            )
+            spark_debug_message("  [INCLUDE_DIRS]: ${dir} ${dir}/.. \n")
+            target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
+            target_link_libraries(${target} ${target_depends})
+        endif(PLUS_INDEX LESS 0)
 
-            spark_add_library_path(${REALNAME} ${_REALPATH_DIR})
-            # target_link_qt5(${REALNAME}) # 使用依赖的依赖或许也不错
-
-            target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR})
-            target_link_libraries(${REALNAME} ${dependencies})
-
-        endif(RIGHT_PLUS LESS 0)
     endforeach(REALPATH IN LISTS REALPATHS)
 
 endmacro(spark_add_library_realpaths)
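# Usage sketch ("one line, one library"); the directory layout mirrors the
# example given in the comments at the end of this file and is illustrative:
#   spark_add_library_realpaths(
#       src/widgets/DocTypeListView
#       src/widgets/MaintainerInfoView+DocTypeListView
#   )
#   target_link_MaintainerInfoView(${PROJECT_NAME})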
@@ -188,6 +181,7 @@ macro(spark_aux_source_paths AUX_VAR)
 endmacro(spark_aux_source_paths AUX_VAR)
 
 # spark_file_glob
+# 使用 file(GLOB) 的匹配规则,并一次可匹配多个规则
 #
 macro(spark_file_glob FGLOB_VAR)
     set(${FGLOB_VAR} "")
@@ -235,3 +229,107 @@ macro(spark_add_source_paths SOURCE_VAR)
         endforeach(ui_src IN LISTS UI_SRCS)
     endforeach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
 endmacro(spark_add_source_paths SOURCE_VAR)
+
+
+# spark_add_library_file_glob
+    # 
+macro(spark_add_library_file_glob _lib_name)
+    spark_file_glob(${_lib_name}_SOURCES ${ARGN})
+    spark_add_library(${_lib_name} ${${_lib_name}_SOURCES})
+endmacro(spark_add_library_file_glob _lib_name)
+
+
+
+# spark_add_executable_source <target> ...
+# 扩展 一行一可执行目标 的构建的扩展宏
+# 在构建时将会另外加入这些资源
+macro(spark_add_executable_source target)
+    set(${target}_ADD_SOURCE ${ARGN})
+endmacro(spark_add_executable_source target)
+
+# 冗余的 spark_add_executable_realpaths 的 dir 处理逻辑
+macro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
+    if("${dir}" STREQUAL "")
+        spark_add_executable(${target}
+            ${${target}_ADD_SOURCE}
+        )
+    else()
+        spark_add_executable_path(${target}
+            ${dir}
+            ${${target}_ADD_SOURCE}
+        )
+    endif("${dir}" STREQUAL "")
+endmacro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
+
+# spark_add_executable_realpaths
+# 基于传入的项进行构建
+# 可接受的值为: 可执行目标:路径列表
+# 可接受的值为: 可执行目标:路径列表+依赖库A+依赖库B
+macro(spark_add_executable_realpaths)
+
+    set(REALPATHS ${ARGN})
+    foreach(REALPATH IN LISTS REALPATHS)
+
+        # 找 : 号下标,这是找:号的函数
+        find_colon(${REALPATH} COLON_INDEX)
+
+        if(COLON_INDEX LESS 0)
+            # do nothing
+        else()
+            # 找到 : 号,将截取 target 名称
+            # string(SUBSTRING "${REALPATH}" 0 ${COLON_INDEX} REALTARGET)
+            find_colon_v(REALPATH COLON_INDEX)
+            str_left_v(REALPATH COLON_INDEX target)
+            str_right_v(REALPATH COLON_INDEX COLON_REMAIN)
+            # message(FATAL_ERROR "构建一个: ${target}") # 已验证
+
+        endif(COLON_INDEX LESS 0)
+
+        # 找 + 号下标,这是找+号的函数
+        find_plus_v(REALPATH PLUS_INDEX)
+
+        if(PLUS_INDEX LESS 0)
+            # 完全没有 + 的情况下,它就是一个基于目录的构建
+            set(dir ${COLON_REMAIN})
+            # spark_add_executable_path(${target}
+            #     ${dir}
+            #     ${${target}_ADD_SOURCE}
+            # )
+            _handle_spark_add_executable_realpaths_if_dir_empty_macro()
+            _handle_spark_target_link_qt_macro(${target})
+        else()
+            # 有 + 的情况下,获取 + 号下标右侧所有内容为 target_depends_str 并转为列表
+            str_right_v(REALPATH PLUS_INDEX target_depends_str)
+            string(REPLACE "+" ";" target_depends "${target_depends_str}")
+
+            # 再从主要内容中获取 dir ,以及
+            find_colon_plus_v(REALPATH dir)
+            # spark_add_executable_path(${target}
+            #     ${dir}
+            #     ${${target}_ADD_SOURCE}
+            # )
+            _handle_spark_add_executable_realpaths_if_dir_empty_macro()
+            target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
+            target_link_libraries(${target} ${target_depends})
+        endif(PLUS_INDEX LESS 0)
+    endforeach(REALPATH IN LISTS REALPATHS)
+
+endmacro(spark_add_executable_realpaths)
+
+
+# 一行一库概念构建
+# 1.构建一个库,基于指定的目录路径进行构建
+#   src/widgets/DocTypeListView
+#              ^目录将被用于制作的目标名称
+#               目录下的所有文件将被用于制作此库的源代码文件
+#
+# 2.构建一个库,基于指定的目录路径进行构建,并依赖其后面所列出的依赖项
+#   src/widgets/MaintainerInfoView+DocTypeListView+...
+#                                  ^此库将被用于 MaintainerInfoView 库的依赖
+#                                 ^此符号'+'将被视为依赖项列表的分隔符
+
+# 一行一可执行目标概念
+# 1.构建一个可执行目标,基于指定的目录路径进行构建(行不通,可执行目标很少为一个目录)
+# 2.构建一个可执行目标,基于指定的文件路径进行构建(也许可以)
+# 3.构建一个可执行目标,基于指定的文件名称进行构建()
+# 4.构建一个可执行目标,基于指定命名规则(target:dir:dir+depend+depend...)
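
A matching sketch for the executable form, following the target:dir+depend rule above; the names and paths are illustrative, and DocTypeListView is assumed to have been built earlier with spark_add_library_realpaths:

    spark_add_executable_source(demo-app src/app/resources.qrc)
    spark_add_executable_realpaths(
        demo-app:src/app+DocTypeListView
    )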

+ 48 - 0
cmake/SparkTranslatorConfig.cmake

@@ -0,0 +1,48 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# translator_qt5 _qmvar [... *.ts]
+macro(translator_qt5 _qmvar)
+    # set(SPARK_TRANSLATIONS_ARGN ${ARGN})
+    # file(GLOB SPARK_TRANSLATIONS ${SPARK_TRANSLATIONS_ARGN})
+    # qt5_add_translation(SPARK_QM_TRANSLATIONS 
+        # ${SPARK_TRANSLATIONS})
+    
+    set(${_qmvar}_ARNG ${ARGN})
+    file(GLOB ${_qmvar}_TS_FILES ${${_qmvar}_ARNG})
+    find_package(Qt5LinguistTools)
+
+    qt5_add_translation(${_qmvar}
+        ${${_qmvar}_TS_FILES})
+
+    set(SPARK_QM_TRANSLATIONS ${_qmvar})
+    
+    # 注意,必须将 SPARK_QM_TRANSLATIONS 或 ${_qmvar} 加入到 add_executable 参数中才能在编译时生成只有原文的ts文件
+    
+    # qt5_create_translation
+        # ts文件会在 make clean 或重新编译的时候一并被删除,再编译的时候生成全新的ts(原有的翻译会丢失,万分注意!!)
+    
+    # qt5_add_translation
+        # 此宏比较稳定
+endmacro(translator_qt5 _qmvar)
+
+
+# translator_qt6 _qmvar [... *.ts]
+macro(translator_qt6 _qmvar)
+    # todo
+endmacro(translator_qt6 _qmvar)
+
+# 冗余的 translator_qt5 或 qt6 的处理逻辑
+macro(_handle_spark_translator_qt_macro _outvar)
+    if(SPARK_FIND_QT5)
+        translator_qt5(${_outvar} ${ARGN})
+    endif(SPARK_FIND_QT5)
+
+    if(SPARK_FIND_QT6)
+        translator_qt6(${_outvar} ${ARGN})
+    endif(SPARK_FIND_QT6)
+endmacro(_handle_spark_translator_qt_macro _outvar)
+
+# translator_qt _qmvar [... *.ts | match]
+macro(translator_qt)
+    _handle_spark_translator_qt_macro(${ARGN})
+endmacro(translator_qt)
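
A usage sketch, assuming SPARK_FIND_QT5 is set by the Qt find module and the .ts files live under translations/ (path illustrative); as noted above, the generated .qm list must be passed to the executable target:

    translator_qt(APP_QM_FILES translations/*.ts)
    spark_add_executable(demo-app main.cpp ${APP_QM_FILES})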

+ 48 - 0
cmake/linuxdeployqt-help

@@ -0,0 +1,48 @@
+linuxdeployqt  (commit 5fa79fa), build 36 built on 2022-08-21 12:36:03 UTC
+WARNING: Not checking glibc on the host system.
+         The resulting AppDir or AppImage may not run on older systems.
+         This mode is unsupported and discouraged.
+         For more information, please see
+         https://github.com/probonopd/linuxdeployqt/issues/340
+
+Usage: linuxdeployqt <app-binary|desktop file> [options]
+
+Options:
+   -always-overwrite        : Copy files even if the target file exists.
+   -appimage                : Create an AppImage (implies -bundle-non-qt-libs).
+   -bundle-non-qt-libs      : Also bundle non-core, non-Qt libraries.
+   -exclude-libs=<list>     : List of libraries which should be excluded,
+                              separated by comma.
+   -ignore-glob=<glob>      : Glob pattern relative to appdir to ignore when
+                              searching for libraries.
+   -executable=<path>       : Let the given executable use the deployed libraries
+                              too
+   -extra-plugins=<list>    : List of extra plugins which should be deployed,
+                              separated by comma.
+   -no-copy-copyright-files : Skip deployment of copyright files.
+   -no-plugins              : Skip plugin deployment.
+   -no-strip                : Don't run 'strip' on the binaries.
+   -no-translations         : Skip deployment of translations.
+   -qmake=<path>            : The qmake executable to use.
+   -qmldir=<path>           : Scan for QML imports in the given path.
+   -qmlimport=<path>        : Add the given path to QML module search locations.
+   -show-exclude-libs       : Print exclude libraries list.
+   -verbose=<0-3>           : 0 = no output, 1 = error/warning (default),
+                              2 = normal, 3 = debug.
+   -updateinformation=<update string>        : Embed update information STRING; if zsyncmake is installed, generate zsync file
+   -qtlibinfix=<infix>      : Adapt the .so search if your Qt distribution has infix.
+   -version                 : Print version statement and exit.
+
+linuxdeployqt takes an application as input and makes it
+self-contained by copying in the Qt libraries and plugins that
+the application uses.
+
+By default it deploys the Qt instance that qmake on the $PATH points to.
+The '-qmake' option can be used to point to the qmake executable
+to be used instead.
+
+Plugins related to a Qt library are copied in with the library.
+
+See the "Deploying Applications on Linux" topic in the
+documentation for more information about deployment on Linux.
+zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$ 

+ 1 - 1
cmake/platforms/linux-appimage.cmake

@@ -43,6 +43,6 @@ if(USE_LINUX_APPIMAGE)
     include(cmake/SparkAppimageConfig.cmake)  # 导入来自 Spark 构建的 Appimage 构建
     add_appimage_icon(assets/spark.png)       # 添加到 Appimage 中的默认的图标
     add_appimage_desktop()                    # 添加到 Appimage 中的默认desktop(使用来自 Spark 构建的 Desktop 构建中配置的信息(必须要求 spark-desktop))
-    add_appimage()                            # 应用对 Appimage 的构建
+    add_appimage_target(${PROJECT_NAME})      # 应用对 Appimage 的构建
 
 endif(USE_LINUX_APPIMAGE)

+ 2 - 2
cmake/platforms/linux-debian.cmake

@@ -9,8 +9,8 @@ option(USE_LINUX_DEBIAN "为 Linux 生成 deb 软件包" OFF)
 
 if(USE_LINUX_DEBIAN)
     
-    find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
-    add_package_descript(cmake/package-deb.descript)
+    find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
+    add_package_descript(cmake/spark-deb-package.descript)
 
 endif(USE_LINUX_DEBIAN)
 

+ 2 - 2
cmake/platforms/linux-uos.cmake

@@ -66,7 +66,7 @@ if(USE_LINUX_UOS)
     # set(PACKAGE_SUFFIX "_onlyUos") 
 
     # 2. 使用同样来自 debian 系列的 deb 构建能力
-    find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
-    add_package_descript(cmake/package-deb.descript)
+    find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
+    add_package_descript(cmake/spark-deb-package.descript)
 
 endif(USE_LINUX_UOS)

+ 9 - 0
cmake/spark-appimage.desktop.in.txt

@@ -0,0 +1,9 @@
+[Desktop Entry]
+Name=@APP_NAME@
+Name[zh_CN]=@APP_NAME_ZH_CN@
+Exec=AppRun %F
+Icon=default
+Comment=@APP_COMMENT@
+Terminal=true
+Type=Application
+Categories=@APP_CATEGORIES@;

+ 5 - 3
cmake/package-deb.descript → cmake/spark-deb-package.descript

@@ -1,6 +1,6 @@
 # 注释行(使用方式)
-# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
-# add_package_descript(cmake/package-deb.descript)
+# find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
+# add_package_descript(cmake/spark-deb-package.descript)
 
 # 打包后的文件名称
 # FileName: 待定
@@ -40,7 +40,9 @@ Maintainer: Ndd开源组织 <[email protected]>
 # 软件包主页
 Homepage: https://gitee.com/cxasm/notepad--
 # 软件包建议
-Recommends:
+Recommends: 
+# 软件冲突
+Conflicts: 
 # 软件包描述信息
 Descrition: Notepad--是一个国产跨平台、简单的文本编辑器。
  Notepad--是一个国产跨平台、简单的文本编辑器,是替换notepad++的一种选择。

+ 2 - 1
cmake/spark-desktop.desktop.in → cmake/spark-desktop.desktop.in.txt

@@ -9,4 +9,5 @@ Icon=@APP_EXECUTE_ICON_PATH@
 Categories=@APP_CATEGORIES@
 MimeType=text/english;text/plain;text/x-makefile;text/x-c++hdr;text/x-c++src;text/x-chdr;text/x-csrc;text/x-java;text/x-moc;text/x-pascal;text/x-tcl;text/x-tex;application/x-shellscript;text/x-patch;text/x-adasrc;text/x-chdr;text/x-csrc;text/css;application/x-desktop;text/x-patch;text/x-fortran;text/html;text/x-java;text/x-tex;text/x-makefile;text/x-objcsrc;text/x-pascal;application/x-perl;application/x-perl;application/x-php;text/vnd.wap.wml;text/x-python;application/x-ruby;text/sgml;application/xml;model/vrml;image/svg+xml;application/json;
 
-# Generated from the DesktopGenerater component of the z-Tools toolkit
+# Generated from the DesktopGenerater component of the z-Tools toolkit
+