author | alexv-smirnov <alex@ydb.tech> | 2023-07-28 17:10:28 +0300
committer | alexv-smirnov <alex@ydb.tech> | 2023-07-28 17:10:28 +0300
commit | c8ce88b2d80cb6860aa97818c49d65b90c7bd31b (patch)
tree | be14b6f8d5b8fd6e264a047a22dc78d760981159 /build
parent | 128317b5f4cb38480a6c3407e25a144ce3026a1c (diff)
download | ydb-c8ce88b2d80cb6860aa97818c49d65b90c7bd31b.tar.gz
Add yexport export generators
Diffstat (limited to 'build')
18 files changed, 1706 insertions, 0 deletions
diff --git a/build/export_generators/gradle/build.gradle.kts b/build/export_generators/gradle/build.gradle.kts new file mode 100644 index 0000000000..832326200e --- /dev/null +++ b/build/export_generators/gradle/build.gradle.kts @@ -0,0 +1,2 @@ +group = "tech.ytsaurus" +version = "1.0.0" diff --git a/build/export_generators/gradle/build.gradle.kts.jinja b/build/export_generators/gradle/build.gradle.kts.jinja new file mode 100644 index 0000000000..321615dced --- /dev/null +++ b/build/export_generators/gradle/build.gradle.kts.jinja @@ -0,0 +1,80 @@ +plugins { +{% if targets|selectattr("app_main_class") -%} + `application` +{% else -%} + `java-library` +{% endif -%} +{% if targets|selectattr('publish') -%} + `maven-publish` + `signing` +{% endif -%} +} + +{% if targets|selectattr('publish') -%} +group = "{{ targets[0].publish_group }}" +version = project.properties["version"] + +{% endif -%} +repositories { + mavenCentral() +} + +{% if targets|selectattr("app_main_class") -%} +application { +{% for target in targets|selectattr("app_main_class") -%} + mainClass.set("{{ target.app_main_class }}") +{% endfor -%} +} + +{% endif -%} +java { + withSourcesJar() + withJavadocJar() +} + +dependencies{ +{% for target in targets -%} +{% if target.junit5_test -%} + testImplementation("org.junit.jupiter:junit-jupiter:5.8.2") + + api("org.apache.commons:commons-math3:3.6.1") + + api("com.google.guava:guava:31.0.1-jre") +{% endif -%} +{% for library in target.consumer_classpath -%} +{% if targets|selectattr("app_main_class") -%} +{% if target.lib_excludes is defined and target.lib_excludes[library]|length > 0 -%} + implementation({{ library }}) { +{% for exclude in target.lib_excludes[library] -%} + exclude group: '{{ exclude[0] }}', module: '{{ exclude[1] }}' +{% endfor -%} + } +{% else -%} + implementation({{ library }}) +{% endif -%} +{% elif target.isTest -%} + testImplementation({{ library }}) +{% else -%} + api({{ library }}) +{% endif -%} +{% endfor -%} +{% endfor -%} +} + +{% if targets|selectattr("junit5_test") -%} +tasks.named<Test>("test") { + useJUnitPlatform() +} + +{% endif -%} +tasks.test { + testLogging { + showStandardStreams = true + events("passed", "skipped", "failed") + } +} + +{% include "extra-tests.gradle.kts" ignore missing %} +{% if targets|selectattr('publish') -%} +{% include 'publish.gradle.kts' -%} +{% endif -%} diff --git a/build/export_generators/gradle/build.gradle.kts.proto.jinja b/build/export_generators/gradle/build.gradle.kts.proto.jinja new file mode 100644 index 0000000000..fe19ef73c0 --- /dev/null +++ b/build/export_generators/gradle/build.gradle.kts.proto.jinja @@ -0,0 +1,71 @@ +import com.google.protobuf.gradle.* + +val buildProtoDir = File("${buildDir}", "__proto__") + +plugins { + id("java-library") + id("com.google.protobuf") version "0.8.19" +{% if targets|selectattr('publish') -%} + `maven-publish` + `signing` +{% endif -%} +} + +{% if targets|selectattr('publish') -%} +group = "{{ targets[0].publish_group }}" +version = project.properties["version"] +{% endif -%} + +repositories { + mavenCentral() +} + +java { + withSourcesJar() + withJavadocJar() +} + +dependencies { +{% for library in targets[0].consumer_classpath -%} + api({{ library }}) +{% endfor -%} + +{% if targets[0].proto_namespace is defined -%} + protobuf(files(File(buildProtoDir, "{{ targets[0].proto_namespace }}"))) +{% else -%} + protobuf(files(buildProtoDir)) +{% endif -%} +} + +{% if targets[0].proto_grpc is defined -%} +protobuf { + plugins { + id("grpc") { + artifact = 
"io.grpc:protoc-gen-grpc-java:1.45.0" + } + } + generateProtoTasks { + ofSourceSet("main").forEach { + it.plugins { + id("grpc") + } + } + } +} +{% endif -%} + +val prepareProto = tasks.register<Copy>("prepareProto") { + from(rootDir) { +{% for proto in targets[0].proto_files -%} + include("{{ proto }}") +{% endfor -%} + } + into(buildProtoDir) +} + +afterEvaluate { + tasks.getByName("extractProto").dependsOn(prepareProto) +} +{% if targets|selectattr('publish') -%} +{% include 'publish.gradle.kts' -%} +{% endif -%} diff --git a/build/export_generators/gradle/generator.toml b/build/export_generators/gradle/generator.toml new file mode 100644 index 0000000000..2bcccd3610 --- /dev/null +++ b/build/export_generators/gradle/generator.toml @@ -0,0 +1,47 @@ +[root] +template="settings.gradle.kts.jinja" +copy=[ + "build.gradle.kts", + "gradlew", + "gradlew.bat", + "gradle/wrapper/gradle-wrapper.jar", + "gradle/wrapper/gradle-wrapper.properties" +] + +[targets.jar] +template="build.gradle.kts.jinja" + +[targets.jar_proto] +template={ path="build.gradle.kts.proto.jinja", dest="build.gradle.kts" } + +[attrs.target] +proto_files="list" +proto_grpc="flag" +proto_namespace="str" +required_jdk="str" +add_vcs_info_to_mf="bool" +junit4_test="flag" +junit5_test="flag" +app_main_class="str" +publish="flag" +publish_group="str" +publish_version="str" +applied_excludes="list" +peers_closure="list" +peers_closure_coords="list" +excludes_rules="list" + +[attrs.root] + +[attrs.dir] + +[attrs.induced] +consumer_classpath="list" + +[merge] +test=[ + "/ut", + "/src/test", + "/src/test/java", + "/src/test-integration" +] diff --git a/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.jar b/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.jar Binary files differnew file mode 100644 index 0000000000..943f0cbfa7 --- /dev/null +++ b/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.jar diff --git a/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.properties b/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..f398c33c4b --- /dev/null +++ b/build/export_generators/gradle/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip +networkTimeout=10000 +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/build/export_generators/gradle/gradlew b/build/export_generators/gradle/gradlew new file mode 100755 index 0000000000..65dcd68d65 --- /dev/null +++ b/build/export_generators/gradle/gradlew @@ -0,0 +1,244 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
+ +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. 
+# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/build/export_generators/gradle/gradlew.bat b/build/export_generators/gradle/gradlew.bat new file mode 100644 index 0000000000..6689b85bee --- /dev/null +++ b/build/export_generators/gradle/gradlew.bat @@ -0,0 +1,92 @@ +@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%"=="" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%"=="" set DIRNAME=.
+@rem This is normally unused
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if %ERRORLEVEL% equ 0 goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if %ERRORLEVEL% equ 0 goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/build/export_generators/gradle/settings.gradle.kts.jinja b/build/export_generators/gradle/settings.gradle.kts.jinja new file mode 100644 index 0000000000..9a22c7f6b5 --- /dev/null +++ b/build/export_generators/gradle/settings.gradle.kts.jinja @@ -0,0 +1,5 @@ +rootProject.name = "{{projectName}}" + +{% for subdir in subdirs -%} +include("{{ subdir | replace("/", ":") }}") +{% endfor -%} diff --git a/build/export_generators/hardcoded-cmake/cmake/common.cmake b/build/export_generators/hardcoded-cmake/cmake/common.cmake new file mode 100644 index 0000000000..2f266468ac --- /dev/null +++ b/build/export_generators/hardcoded-cmake/cmake/common.cmake @@ -0,0 +1,304 @@ +# Set of common macros + +find_package(Python3 REQUIRED) + +add_compile_definitions(CATBOOST_OPENSOURCE=yes) + +# assumes ToolName is always both the binary and the target name +function(get_built_tool_path OutBinPath OutDependency SrcPath ToolName) + if (CMAKE_GENERATOR MATCHES "Visual.Studio.*") + set(BinPath "${TOOLS_ROOT}/${SrcPath}/\$(Configuration)/${ToolName}${CMAKE_EXECUTABLE_SUFFIX}") + else() + set(BinPath "${TOOLS_ROOT}/${SrcPath}/${ToolName}${CMAKE_EXECUTABLE_SUFFIX}") + endif() + set(${OutBinPath} ${BinPath} PARENT_SCOPE) + if (CMAKE_CROSSCOMPILING) + set(${OutDependency} ${BinPath} PARENT_SCOPE) + else() + set(${OutDependency} ${ToolName} PARENT_SCOPE) + endif() +endfunction() + + +function(target_ragel_lexers TgtName Key Src) + SET(RAGEL_BIN ${CMAKE_BINARY_DIR}/bin/ragel${CMAKE_EXECUTABLE_SUFFIX}) + get_filename_component(OutPath ${Src} NAME_WLE) + get_filename_component(SrcDirPath ${Src} DIRECTORY) + get_filename_component(OutputExt ${OutPath} EXT) + if (OutputExt STREQUAL "") + string(APPEND OutPath .rl6.cpp) + endif() + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${OutPath} + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/run_tool.py -- ${RAGEL_BIN} ${RAGEL_FLAGS} ${ARGN} -o ${CMAKE_CURRENT_BINARY_DIR}/${OutPath} ${Src} + DEPENDS ${CMAKE_SOURCE_DIR}/build/scripts/run_tool.py ${Src} + WORKING_DIRECTORY ${SrcDirPath} + ) + target_sources(${TgtName} ${Key} ${CMAKE_CURRENT_BINARY_DIR}/${OutPath}) +endfunction() + +function(target_yasm_source TgtName Key Src) + SET(YASM_BIN ${CMAKE_BINARY_DIR}/bin/yasm${CMAKE_EXECUTABLE_SUFFIX}) + get_filename_component(OutPath ${Src} NAME_WLE) + string(APPEND OutPath .o) + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${OutPath} + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/run_tool.py -- ${YASM_BIN} ${YASM_FLAGS} ${ARGN} -o ${CMAKE_CURRENT_BINARY_DIR}/${OutPath} ${Src} + DEPENDS ${CMAKE_SOURCE_DIR}/build/scripts/run_tool.py ${Src} + ) + target_sources(${TgtName} ${Key} ${CMAKE_CURRENT_BINARY_DIR}/${OutPath}) +endfunction() + +function(target_joined_source TgtName Out) + foreach(InSrc ${ARGN}) + file(RELATIVE_PATH IncludePath ${CMAKE_SOURCE_DIR} ${InSrc}) + list(APPEND IncludesList ${IncludePath}) + endforeach() + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${Out} + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/gen_join_srcs.py ${CMAKE_CURRENT_BINARY_DIR}/${Out} ${IncludesList} + DEPENDS ${CMAKE_SOURCE_DIR}/build/scripts/gen_join_srcs.py ${ARGN} + ) + target_sources(${TgtName} PRIVATE ${CMAKE_CURRENT_BINARY_DIR}/${Out}) +endfunction() + +function(target_sources_custom TgtName CompileOutSuffix) + set(opts "") + set(oneval_args "") + set(multival_args SRCS CUSTOM_FLAGS) + cmake_parse_arguments(TARGET_SOURCES_CUSTOM + "${opts}" + "${oneval_args}" + "${multival_args}" + ${ARGN} + ) + + 
foreach(Src ${TARGET_SOURCES_CUSTOM_SRCS}) + file(RELATIVE_PATH SrcRealPath ${CMAKE_SOURCE_DIR} ${Src}) + get_filename_component(SrcDir ${SrcRealPath} DIRECTORY) + get_filename_component(SrcName ${SrcRealPath} NAME_WLE) + get_filename_component(SrcExt ${SrcRealPath} LAST_EXT) + set(SrcCopy "${CMAKE_BINARY_DIR}/${SrcDir}/${SrcName}${CompileOutSuffix}${SrcExt}") + add_custom_command( + OUTPUT ${SrcCopy} + COMMAND ${CMAKE_COMMAND} -E copy ${Src} ${SrcCopy} + DEPENDS ${Src} + ) + list(APPEND PreparedSrc ${SrcCopy}) + set_property( + SOURCE + ${SrcCopy} + APPEND PROPERTY COMPILE_OPTIONS + ${TARGET_SOURCES_CUSTOM_CUSTOM_FLAGS} + -I${CMAKE_SOURCE_DIR}/${SrcDir} + ) + endforeach() + + target_sources( + ${TgtName} + PRIVATE + ${PreparedSrc} + ) +endfunction() + +function(generate_enum_serilization Tgt Input) + set(opts "") + set(oneval_args INCLUDE_HEADERS) + set(multival_args "") + cmake_parse_arguments(ENUM_SERIALIZATION_ARGS + "${opts}" + "${oneval_args}" + "${multival_args}" + ${ARGN} + ) + + get_built_tool_path(enum_parser_bin enum_parser_dependency tools/enum_parser/enum_parser enum_parser) + + get_filename_component(BaseName ${Input} NAME) + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${BaseName}_serialized.cpp + COMMAND + ${enum_parser_bin} + ${Input} + --include-path ${ENUM_SERIALIZATION_ARGS_INCLUDE_HEADERS} + --output ${CMAKE_CURRENT_BINARY_DIR}/${BaseName}_serialized.cpp + DEPENDS ${Input} ${enum_parser_dependency} + ) + target_sources(${Tgt} PRIVATE ${CMAKE_CURRENT_BINARY_DIR}/${BaseName}_serialized.cpp) +endfunction() + + +if (MSVC AND (${CMAKE_VERSION} VERSION_LESS "3.21.0")) + message(FATAL_ERROR "Build with MSVC-compatible toolchain requires at least cmake 3.21.0 because of used TARGET_OBJECTS feature") +endif() + +function(add_global_library_for TgtName MainName) + if (MSVC) + add_library(${TgtName} OBJECT ${ARGN}) + add_dependencies(${TgtName} ${MainName}) # needed because object library can use some extra generated files in MainName + target_link_libraries(${MainName} INTERFACE ${TgtName} "$<TARGET_OBJECTS:${TgtName}>") + else() + add_library(${TgtName} STATIC ${ARGN}) + add_library(${TgtName}.wholearchive INTERFACE) + add_dependencies(${TgtName}.wholearchive ${TgtName}) + add_dependencies(${TgtName} ${MainName}) + if(APPLE) + target_link_options(${TgtName}.wholearchive INTERFACE "SHELL:-Wl,-force_load,$<TARGET_FILE:${TgtName}>") + else() + target_link_options(${TgtName}.wholearchive INTERFACE "SHELL:-Wl,--whole-archive $<TARGET_FILE:${TgtName}> -Wl,--no-whole-archive") + endif() + target_link_libraries(${MainName} INTERFACE ${TgtName}.wholearchive) + endif() +endfunction() + +function(copy_file From To) + add_custom_command( + OUTPUT ${To} + COMMAND ${CMAKE_COMMAND} -E copy ${From} ${To} + DEPENDS ${From} + ) +endfunction() + +function(vcs_info Tgt) + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/vcs_info.json + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/generate_vcs_info.py ${CMAKE_CURRENT_BINARY_DIR}/vcs_info.json ${CMAKE_SOURCE_DIR} + DEPENDS ${CMAKE_SOURCE_DIR}/build/scripts/generate_vcs_info.py + ) + + add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/__vcs_version__.c + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/vcs_info.py ${CMAKE_CURRENT_BINARY_DIR}/vcs_info.json ${CMAKE_CURRENT_BINARY_DIR}/__vcs_version__.c ${CMAKE_SOURCE_DIR}/build/scripts/c_templates/svn_interface.c + DEPENDS ${CMAKE_SOURCE_DIR}/build/scripts/vcs_info.py ${CMAKE_SOURCE_DIR}/build/scripts/c_templates/svn_interface.c 
${CMAKE_CURRENT_BINARY_DIR}/vcs_info.json + ) + target_sources(${Tgt} PRIVATE ${CMAKE_CURRENT_BINARY_DIR}/__vcs_version__.c) +endfunction() + +function(resources Tgt Output) + set(opts "") + set(oneval_args "") + set(multival_args INPUTS KEYS) + cmake_parse_arguments(RESOURCE_ARGS + "${opts}" + "${oneval_args}" + "${multival_args}" + ${ARGN} + ) + list(LENGTH RESOURCE_ARGS_INPUTS InputsCount) + list(LENGTH RESOURCE_ARGS_KEYS KeysCount) + if (NOT ${InputsCount} EQUAL ${KeysCount}) + message(FATAL_ERROR "Resources inputs count isn't equal to keys count in " ${Tgt}) + endif() + math(EXPR ListsMaxIdx "${InputsCount} - 1") + foreach(Idx RANGE ${ListsMaxIdx}) + list(GET RESOURCE_ARGS_INPUTS ${Idx} Input) + list(GET RESOURCE_ARGS_KEYS ${Idx} Key) + list(APPEND ResourcesList ${Input}) + list(APPEND ResourcesList ${Key}) + endforeach() + + get_built_tool_path(rescompiler_bin rescompiler_dependency tools/rescompiler/bin rescompiler) + + add_custom_command( + OUTPUT ${Output} + COMMAND ${rescompiler_bin} ${Output} ${ResourcesList} + DEPENDS ${RESOURCE_ARGS_INPUTS} ${rescompiler_dependency} + ) +endfunction() + +function(use_export_script Target ExportFile) + get_filename_component(OutName ${ExportFile} NAME) + set(OutPath ${CMAKE_CURRENT_BINARY_DIR}/gen_${OutName}) + + if (MSVC) + target_link_options(${Target} PRIVATE /DEF:${OutPath}) + set(EXPORT_SCRIPT_FLAVOR msvc) + elseif(APPLE) + execute_process( + COMMAND ${Python3_EXECUTABLE} ${CMAKE_SOURCE_DIR}/build/scripts/export_script_gen.py ${ExportFile} - --format darwin + RESULT_VARIABLE _SCRIPT_RES + OUTPUT_VARIABLE _SCRIPT_FLAGS + ERROR_VARIABLE _SCRIPT_STDERR + ) + if (NOT ${_SCRIPT_RES} EQUAL 0) + message(FATAL_ERROR "Failed to parse export symbols from ${ExportFile}:\n${_SCRIPT_STDERR}") + return() + endif() + separate_arguments(ParsedScriptFlags NATIVE_COMMAND ${_SCRIPT_FLAGS}) + target_link_options(${Target} PRIVATE ${ParsedScriptFlags}) + return() + else() + set(EXPORT_SCRIPT_FLAVOR gnu) + target_link_options(${Target} PRIVATE -Wl,--gc-sections -rdynamic -Wl,--version-script=${OutPath}) + endif() + + add_custom_command( + OUTPUT ${OutPath} + COMMAND + Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/export_script_gen.py ${ExportFile} ${OutPath} --format ${EXPORT_SCRIPT_FLAVOR} + DEPENDS ${ExportFile} ${CMAKE_SOURCE_DIR}/build/scripts/export_script_gen.py + ) + target_sources(${Target} PRIVATE ${OutPath}) + set_property(SOURCE ${OutPath} PROPERTY + HEADER_FILE_ONLY On + ) + set_property(TARGET ${Target} APPEND PROPERTY + LINK_DEPENDS ${OutPath} + ) +endfunction() + +function(add_yunittest) + set(opts "") + set(oneval_args NAME TEST_TARGET) + set(multival_args TEST_ARG) + cmake_parse_arguments(YUNITTEST_ARGS + "${opts}" + "${oneval_args}" + "${multival_args}" + ${ARGN} + ) + get_property(SPLIT_FACTOR TARGET ${YUNITTEST_ARGS_TEST_TARGET} PROPERTY SPLIT_FACTOR) + if (${SPLIT_FACTOR} EQUAL 1) + add_test(NAME ${YUNITTEST_ARGS_NAME} COMMAND ${YUNITTEST_ARGS_TEST_TARGET} ${YUNITTEST_ARGS_TEST_ARG}) + return() + endif() + + math(EXPR LastIdx "${SPLIT_FACTOR} - 1") + foreach(Idx RANGE ${LastIdx}) + add_test(NAME ${YUNITTEST_ARGS_NAME}_${Idx} + COMMAND Python3::Interpreter ${CMAKE_SOURCE_DIR}/build/scripts/split_unittest.py --split-factor ${SPLIT_FACTOR} --shard ${Idx} + $<TARGET_FILE:${YUNITTEST_ARGS_TEST_TARGET}> ${YUNITTEST_ARGS_TEST_ARG}) + endforeach() +endfunction() + +function(set_yunittest_property) + set(opts "") + set(oneval_args TEST PROPERTY) + set(multival_args ) + cmake_parse_arguments(YUNITTEST_ARGS + "${opts}" + "${oneval_args}" + 
"${multival_args}" + ${ARGN} + ) + get_property(SPLIT_FACTOR TARGET ${YUNITTEST_ARGS_TEST} PROPERTY SPLIT_FACTOR) + + if (${SPLIT_FACTOR} EQUAL 1) + set_property(TEST ${YUNITTEST_ARGS_TEST} PROPERTY ${YUNITTEST_ARGS_PROPERTY} ${YUNITTEST_ARGS_UNPARSED_ARGUMENTS}) + return() + endif() + + math(EXPR LastIdx "${SPLIT_FACTOR} - 1") + foreach(Idx RANGE ${LastIdx}) + set_property(TEST ${YUNITTEST_ARGS_TEST}_${Idx} PROPERTY ${YUNITTEST_ARGS_PROPERTY} ${YUNITTEST_ARGS_UNPARSED_ARGUMENTS}) + endforeach() +endfunction() + +option(CUSTOM_ALLOCATORS "Enables use of per executable specified allocators. Can be turned off in order to use code instrumentation tooling relying on system allocator (sanitizers, heaptrack, ...)" On) +function(target_allocator Tgt) + if (CUSTOM_ALLOCATORS) + target_link_libraries(${Tgt} PRIVATE ${ARGN}) + else() + target_link_libraries(${Tgt} PRIVATE system_allocator) + endif() +endfunction() diff --git a/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.gnu.cmake b/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.gnu.cmake new file mode 100644 index 0000000000..3dcde4027f --- /dev/null +++ b/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.gnu.cmake @@ -0,0 +1,96 @@ +set(_GNU_COMMON_C_CXX_FLAGS "\ + -fexceptions \ + -fno-common \ + -fcolor-diagnostics \ + -faligned-allocation \ + -fdebug-default-version=4 \ + -ffunction-sections \ + -fdata-sections \ + -Wall \ + -Wextra \ + -Wno-parentheses \ + -Wno-implicit-const-int-float-conversion \ + -Wno-unknown-warning-option \ + -pipe \ + -D_THREAD_SAFE \ + -D_PTHREADS \ + -D_REENTRANT \ + -D_LARGEFILE_SOURCE \ + -D__STDC_CONSTANT_MACROS \ + -D__STDC_FORMAT_MACROS \ + -D__LONG_LONG_SUPPORTED \ +") + +if (CMAKE_SYSTEM_NAME MATCHES "^(Android|Linux)$") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -D_GNU_SOURCE") +endif() + +if (CMAKE_SYSTEM_NAME MATCHES "^(Darwin|Linux)$") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -DLIBCXX_BUILDING_LIBCXXRT") +endif() + +if (CMAKE_SYSTEM_NAME STREQUAL "Linux") + # Use .init_array instead of .ctors (default for old clang versions) + # See: https://maskray.me/blog/2021-11-07-init-ctors-init-array + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -fuse-init-array") +endif() + +if (ANDROID) + include_directories(SYSTEM ${CMAKE_ANDROID_NDK}/sources/cxx-stl/llvm-libc++abi/include) + + # There is no usable _FILE_OFFSET_BITS=64 support in Androids until API 21. And it's incomplete until at least API 24. 
+ # https://android.googlesource.com/platform/bionic/+/master/docs/32-bit-abi.md +else() + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -D_FILE_OFFSET_BITS=64") +endif() + +if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(arm.*|aarch64|ppc64le)") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -fsigned-char") +endif() + +if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(i686|x86_64|AMD64)$") + if (CMAKE_SYSTEM_PROCESSOR STREQUAL "i686") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -m32") + elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|AMD64)$") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -m64") + endif() + string(APPEND _GNU_COMMON_C_CXX_FLAGS "\ + -msse2 \ + -msse3 \ + -mssse3 \ + ") + + if ((CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|AMD64)$") OR (NOT ANDROID)) + string(APPEND _GNU_COMMON_C_CXX_FLAGS "\ + -msse4.1 \ + -msse4.2 \ + -mpopcnt \ + ") + if (NOT ANDROID) + # older clang versions did not support this feature on Android: + # https://reviews.llvm.org/rGc32d307a49f5255602e7543e64e6c38a7f536abc + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -mcx16") + endif() + endif() + + if (CMAKE_SYSTEM_NAME STREQUAL "Linux") + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -D_YNDX_LIBUNWIND_ENABLE_EXCEPTION_BACKTRACE") + endif() +elseif (ANDROID AND (CMAKE_ANDROID_ARCH_ABI STREQUAL "armeabi-v7a")) + string(APPEND _GNU_COMMON_C_CXX_FLAGS " -mfloat-abi=softfp") +endif() + +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${_GNU_COMMON_C_CXX_FLAGS}") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_GNU_COMMON_C_CXX_FLAGS} \ + -Woverloaded-virtual \ + -Wimport-preprocessor-directive-pedantic \ + -Wno-undefined-var-template \ + -Wno-return-std-move \ + -Wno-defaulted-function-deleted \ + -Wno-pessimizing-move \ + -Wno-deprecated-anon-enum-enum-conversion \ + -Wno-deprecated-enum-enum-conversion \ + -Wno-deprecated-enum-float-conversion \ + -Wno-ambiguous-reversed-operator \ + -Wno-deprecated-volatile \ +") diff --git a/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.msvc.cmake b/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.msvc.cmake new file mode 100644 index 0000000000..ffbd2e033c --- /dev/null +++ b/build/export_generators/hardcoded-cmake/cmake/global_flags.compiler.msvc.cmake @@ -0,0 +1,144 @@ +set(_WARNS_ENABLED + 4018 # 'expression' : signed/unsigned mismatch + 4265 # 'class' : class has virtual functions, but destructor is not virtual + 4296 # 'operator' : expression is always false + 4431 # missing type specifier - int assumed +) + +set(_WARNS_AS_ERROR + 4013 # 'function' undefined; assuming extern returning int +) + +set(_WARNS_DISABLED + # While this warning corresponds to enabled-by-default -Wmacro-redefinition, + # it floods clog with abundant amount of log lines, + # as yvals_core.h from Windows SDK redefines certain + # which macros logically belong to libcxx + 4005 # '__cpp_lib_*': macro redefinition. 
+ + # Ne need to recheck this, but it looks like _CRT_USE_BUILTIN_OFFSETOF still makes sense + 4117 # macro name '_CRT_USE_BUILTIN_OFFSETOF' is reserved, '#define' ignored + + 4127 # conditional expression is constant + 4200 # nonstandard extension used : zero-sized array in struct/union + 4201 # nonstandard extension used : nameless struct/union + 4351 # elements of array will be default initialized + 4355 # 'this' : used in base member initializer list + 4503 # decorated name length exceeded, name was truncated + 4510 # default constructor could not be generated + 4511 # copy constructor could not be generated + 4512 # assignment operator could not be generated + 4554 # check operator precedence for possible error; use parentheses to clarify precedence + 4610 # 'object' can never be instantiated - user defined constructor required + 4706 # assignment within conditional expression + 4800 # forcing value to bool 'true' or 'false' (performance warning) + 4996 # The POSIX name for this item is deprecated + 4714 # function marked as __forceinline not inlined + 4197 # 'TAtomic' : top-level volatile in cast is ignored + 4245 # 'initializing' : conversion from 'int' to 'ui32', signed/unsigned mismatch + 4324 # 'ystd::function<void (uint8_t *)>': structure was padded due to alignment specifier + 5033 # 'register' is no longer a supported storage class +) + +set (_MSVC_COMMON_C_CXX_FLAGS " \ + /DWIN32 \ + /D_WIN32 \ + /D_WINDOWS \ + /D_CRT_SECURE_NO_WARNINGS \ + /D_CRT_NONSTDC_NO_WARNINGS \ + /D_USE_MATH_DEFINES \ + /D__STDC_CONSTANT_MACROS \ + /D__STDC_FORMAT_MACROS \ + /D_USING_V110_SDK71_ \ + /DWIN32_LEAN_AND_MEAN \ + /DNOMINMAX \ + /nologo \ + /Zm500 \ + /GR \ + /bigobj \ + /FC \ + /EHs \ + /errorReport:prompt \ + /Zc:inline \ + /utf-8 \ + /permissive- \ + /D_WIN32_WINNT=0x0601 \ + /D_MBCS \ + /MP \ +") + +if (CMAKE_GENERATOR MATCHES "Visual.Studio.*") + string(APPEND _MSVC_COMMON_C_CXX_FLAGS "\ + /DY_UCRT_INCLUDE=\"$(UniversalCRT_IncludePath.Split(';')[0].Replace('\\','/'))\" \ + /DY_MSVC_INCLUDE=\"$(VC_VC_IncludePath.Split(';')[0].Replace('\\','/'))\" \ + ") +else() + set(UCRT_INCLUDE_FOUND false) + foreach(INCLUDE_PATH $ENV{INCLUDE}) + if (INCLUDE_PATH MATCHES ".*\\\\Windows Kits\\\\[0-9]+\\\\include\\\\[0-9\\.]+\\\\ucrt$") + message(VERBOSE "Found Y_UCRT_INCLUDE path \"${INCLUDE_PATH}\"") + string(REPLACE "\\" "/" SAFE_INCLUDE_PATH "${INCLUDE_PATH}") + string(APPEND _MSVC_COMMON_C_CXX_FLAGS " /DY_UCRT_INCLUDE=\"${SAFE_INCLUDE_PATH}\"") + set(UCRT_INCLUDE_FOUND true) + break() + endif() + endforeach() + if (NOT UCRT_INCLUDE_FOUND) + message(FATAL_ERROR "UniversalCRT include path not found, please add it to the standard INCLUDE environment variable (most likely by calling vcvars64.bat)") + endif() + + set(MSVC_INCLUDE_FOUND false) + foreach(INCLUDE_PATH $ENV{INCLUDE}) + if (INCLUDE_PATH MATCHES ".*VC\\\\Tools\\\\MSVC\\\\[0-9\\.]+\\\\include$") + message(VERBOSE "Found Y_MSVC_INCLUDE path \"${INCLUDE_PATH}\"") + string(REPLACE "\\" "/" SAFE_INCLUDE_PATH "${INCLUDE_PATH}") + string(APPEND _MSVC_COMMON_C_CXX_FLAGS " /DY_MSVC_INCLUDE=\"${SAFE_INCLUDE_PATH}\"") + set(MSVC_INCLUDE_FOUND true) + break() + endif() + endforeach() + if (NOT MSVC_INCLUDE_FOUND) + message(FATAL_ERROR "MSVC include path not found, please add it to the standard INCLUDE environment variable (most likely by calling vcvars64.bat)") + endif() +endif() + +foreach(WARN ${_WARNS_AS_ERROR}) + string(APPEND _MSVC_COMMON_C_CXX_FLAGS " /we${WARN}") +endforeach() + +foreach(WARN ${_WARNS_ENABLED}) + string(APPEND 
_MSVC_COMMON_C_CXX_FLAGS " /w1${WARN}") +endforeach() + +foreach(WARN ${_WARNS_DISABLED}) + string(APPEND _MSVC_COMMON_C_CXX_FLAGS " /wd${WARN}") +endforeach() + +if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|AMD64)$") + string(APPEND _MSVC_COMMON_C_CXX_FLAGS " \ + /D_WIN64 \ + /DWIN64 \ + /D__SSE2__ \ + /D__SSE3__ \ + /D__SSSE3__ \ + /D__SSE4_1__ \ + /D__SSE4_2__ \ + /D__POPCNT__ \ + ") +endif() + +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${_MSVC_COMMON_C_CXX_FLAGS} \ +") + +# TODO - '/D_CRT_USE_BUILTIN_OFFSETOF' +# TODO - -DUSE_STL_SYSTEM + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_MSVC_COMMON_C_CXX_FLAGS} \ + /std:c++latest \ + /Zc:__cplusplus \ +") + +set(CMAKE_CXX_FLAGS_DEBUG "/Z7 /Ob0 /Od /D_DEBUG") +set(CMAKE_CXX_FLAGS_MINSIZEREL "/O1 /Ob1 /DNDEBUG") +set(CMAKE_CXX_FLAGS_RELEASE "/Ox /Ob2 /Oi /DNDEBUG") +set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "/Z7 /Ox /Ob1 /DNDEBUG") diff --git a/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.gnu.cmake b/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.gnu.cmake new file mode 100644 index 0000000000..5b32a8868f --- /dev/null +++ b/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.gnu.cmake @@ -0,0 +1,25 @@ +if (ANDROID) + # NDK r23 onwards has stopped using libgcc: + # - https://github.com/android/ndk/wiki/Changelog-r23#changes + # - https://github.com/android/ndk/issues/1230 + # LLVM's libunwind is now used instead of libgcc for all architectures rather than just 32-bit Arm. + # - https://github.com/android/ndk/issues/1231 + # LLVM's libclang_rt.builtins is now used instead of libgcc. + if (CMAKE_ANDROID_NDK_VERSION GREATER_EQUAL 23) + # Use toolchain defaults to link with libunwind/clang_rt.builtins + add_link_options("-nostdlib++") + else () + # Preserve old behaviour: specify runtime libs manually + add_link_options(-nodefaultlibs) + link_libraries(gcc) + if (CMAKE_ANDROID_ARCH_ABI STREQUAL "armeabi-v7a") + link_libraries(unwind) + endif() + endif() +elseif (CMAKE_SYSTEM_NAME MATCHES "^(Darwin|Linux)$") + add_link_options("-nodefaultlibs") +endif() + +if (APPLE) + set(CMAKE_SHARED_LINKER_FLAGS "-undefined dynamic_lookup") +endif() diff --git a/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.msvc.cmake b/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.msvc.cmake new file mode 100644 index 0000000000..d70ff2c3f1 --- /dev/null +++ b/build/export_generators/hardcoded-cmake/cmake/global_flags.linker.msvc.cmake @@ -0,0 +1,16 @@ +cmake_policy(SET CMP0091 NEW) +set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>") + +add_link_options( + /NOLOGO + /ERRORREPORT:PROMPT + /SUBSYSTEM:CONSOLE + /TLBID:1 + /NXCOMPAT + /IGNORE:4221 + /INCREMENTAL +) + +if ((CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64") OR (CMAKE_SYSTEM_PROCESSOR STREQUAL "AMD64")) + add_link_options(/MACHINE:X64) +endif() diff --git a/build/export_generators/hardcoded-cmake/generator.toml b/build/export_generators/hardcoded-cmake/generator.toml new file mode 100644 index 0000000000..b8c41e8b36 --- /dev/null +++ b/build/export_generators/hardcoded-cmake/generator.toml @@ -0,0 +1,47 @@ +[root] + +copy=[ + "cmake/global_flags.compiler.gnu.cmake", + "cmake/global_flags.compiler.msvc.cmake", + "cmake/global_flags.linker.gnu.cmake", + "cmake/global_flags.linker.msvc.cmake" +] + + +[attrs.target] +vcs_info = {type="list", copy=[ + "cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} +target_joined_source = {type="list", copy=[ + 
"cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} +target_sources_custom = {type="list", copy=[ + "cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} +target_ragel_lexers = {type="list", copy=[ + "cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} +target_yasm_source = {type="list", copy=[ + "cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} +add_global_library_for = {type="list", copy=[ + "cmake/common.cmake", + "scripts/export_script_gen.py", + "scripts/split_unittest.py", + "scripts/generate_vcs_info.py" +]} diff --git a/build/export_generators/hardcoded-cmake/scripts/export_script_gen.py b/build/export_generators/hardcoded-cmake/scripts/export_script_gen.py new file mode 100644 index 0000000000..6bf68fef7c --- /dev/null +++ b/build/export_generators/hardcoded-cmake/scripts/export_script_gen.py @@ -0,0 +1,126 @@ +import argparse +import collections +import sys + + +def parse_export_file(src): + for line in src: + line = line.strip() + + if line and '#' not in line: + words = line.split() + if len(words) == 2 and words[0] == 'linux_version': + yield {'linux_version': words[1]} + elif len(words) == 2: + yield {'lang': words[0], 'sym': words[1]} + elif len(words) == 1: + yield {'lang': 'C', 'sym': words[0]} + else: + raise Exception('unsupported exports line: "{}"'.format(line)) + + +def to_c(sym): + symbols = collections.deque(sym.split('::')) + c_prefixes = [ # demangle prefixes for c++ symbols + '_ZN', # namespace + '_ZTIN', # typeinfo for + '_ZTSN', # typeinfo name for + '_ZTTN', # VTT for + '_ZTVN', # vtable for + '_ZNK', # const methods + ] + c_sym = '' + while symbols: + s = symbols.popleft() + if s == '*': + c_sym += '*' + break + if '*' in s and len(s) > 1: + raise Exception('Unsupported format, cannot guess length of symbol: ' + s) + c_sym += str(len(s)) + s + if symbols: + raise Exception('Unsupported format: ' + sym) + if c_sym[-1] != '*': + c_sym += 'E*' + return ['{prefix}{sym}'.format(prefix=prefix, sym=c_sym) for prefix in c_prefixes] + + +def to_gnu(src, dest): + d = collections.defaultdict(list) + version = None + for item in parse_export_file(src): + if item.get('linux_version'): + if not version: + version = item.get('linux_version') + else: + raise Exception('More than one linux_version defined') + elif item['lang'] == 'C++': + d['C'].extend(to_c(item['sym'])) + else: + d[item['lang']].append(item['sym']) + + if version: + dest.write('{} {{\nglobal:\n'.format(version)) + else: + dest.write('{\nglobal:\n') + + for k, v in d.items(): + dest.write(' extern "' + k + '" {\n') + + for x in v: + dest.write(' ' + x + ';\n') + + dest.write(' };\n') + + dest.write('local: *;\n};\n') + + +def to_msvc(src, dest): + dest.write('EXPORTS\n') + for item in parse_export_file(src): + if item.get('linux_version'): + continue + if item.get('lang') == 'C': + dest.write(' {}\n'.format(item.get('sym'))) + + +def to_darwin(src, dest): + pre = '' + for item in parse_export_file(src): + if item.get('linux_version'): + continue + + if item['lang'] == 'C': + dest.write(pre + '-Wl,-exported_symbol,_' + item['sym']) + elif item['lang'] == 'C++': + for sym in to_c(item['sym']): + dest.write(pre + '-Wl,-exported_symbol,_' + sym) + else: + raise Exception('unsupported lang: ' + item['lang']) + if pre == '': + pre = 
' ' + + +def main(): + parser = argparse.ArgumentParser(description='Convert self-invented platform independent export file format to the format required by specific linker') + parser.add_argument('src', type=argparse.FileType('r', encoding='UTF-8'), help='platform independent export file path') + parser.add_argument('dest', type=argparse.FileType('w', encoding='UTF-8'), help='destination export file for required linker') + parser.add_argument('--format', help='destination file type format: gnu, msvc or darwin') + + args = parser.parse_args() + if args.format == 'gnu': + to_gnu(args.src, args.dest) + elif args.format == 'msvc': + to_msvc(args.src, args.dest) + elif args.format == 'darwin': + to_darwin(args.src, args.dest) + else: + print('Unknown destination file format: {}'.format(args.format), file=sys.stderr) + sys.exit(1) + + args.src.close() + args.dest.close() + + +if __name__ == '__main__': + main() diff --git a/build/export_generators/hardcoded-cmake/scripts/generate_vcs_info.py b/build/export_generators/hardcoded-cmake/scripts/generate_vcs_info.py new file mode 100644 index 0000000000..770d2ec802 --- /dev/null +++ b/build/export_generators/hardcoded-cmake/scripts/generate_vcs_info.py @@ -0,0 +1,321 @@ +# coding: utf-8 +import json +import locale +import re +import os +import subprocess +import sys +import time +import six as six_ + + +INDENT = " " * 4 + + +def _get_vcs_dictionary(vcs_type, *arg): + if vcs_type == 'git': + return _GitVersion.parse(*arg) + else: + raise Exception("Unknown VCS type {}".format(str(vcs_type))) + + +def _get_user_locale(): + try: + if six_.PY3: + return [locale.getencoding()] + else: + return [locale.getdefaultlocale()[1]] + except Exception: + return [] + + +class _GitVersion(): + @classmethod + def parse(cls, commit_hash, author_info, summary_info, body_info, tag_info, branch_info, depth=None): + r""" Parses output of + git rev-parse HEAD + git log -1 --format='format:%an <%ae>' + git log -1 --format='format:%s' + git log -1 --grep='^git-svn-id: ' --format='format:%b' or + git log -1 --grep='^Revision: r?\d*' --format='format:%b + git describe --exact-match --tags HEAD + git describe --exact-match --all HEAD + and depth as computed by _get_git_depth + '""" + + info = {} + info['hash'] = commit_hash + info['commit_author'] = _SystemInfo._to_text(author_info) + info['summary'] = _SystemInfo._to_text(summary_info) + + if 'svn_commit_revision' not in info: + url = re.search("git?-svn?-id: (.*)@(\\d*).*", body_info) + if url: + info['svn_url'] = url.group(1) + info['svn_commit_revision'] = int(url.group(2)) + + if 'svn_commit_revision' not in info: + rev = re.search('Revision: r?(\\d*).*', body_info) + if rev: + info['svn_commit_revision'] = int(rev.group(1)) + + info['tag'] = tag_info + info['branch'] = branch_info + info['scm_text'] = cls._format_scm_data(info) + info['vcs'] = 'git' + + if depth: + info['patch_number'] = int(depth) + return info + + @staticmethod + def _format_scm_data(info): + scm_data = "Git info:\n" + scm_data += INDENT + "Commit: " + info['hash'] + "\n" + scm_data += INDENT + "Branch: " + info['branch'] + "\n" + scm_data += INDENT + "Author: " + info['commit_author'] + "\n" + scm_data += INDENT + "Summary: " + info['summary'] + "\n" + if 'svn_commit_revision' in info or 'svn_url' in info: + scm_data += INDENT + "git-svn info:\n" + if 'svn_url' in info: + scm_data += INDENT + "URL: " + info['svn_url'] + "\n" + if 'svn_commit_revision' in info: + scm_data += INDENT + "Last Changed Rev: " + str(info['svn_commit_revision']) + "\n" + 
return scm_data + + @staticmethod + def external_data(arc_root): + env = os.environ.copy() + env['TZ'] = '' + + hash_args = ['rev-parse', 'HEAD'] + author_args = ['log', '-1', '--format=format:%an <%ae>'] + summary_args = ['log', '-1', '--format=format:%s'] + svn_args = ['log', '-1', '--grep=^git-svn-id: ', '--format=format:%b'] + svn_args_alt = ['log', '-1', '--grep=^Revision: r\\?\\d*', '--format=format:%b'] + tag_args = ['describe', '--exact-match', '--tags', 'HEAD'] + branch_args = ['describe', '--exact-match', '--all', 'HEAD'] + + # using local 'Popen' wrapper + commit = _SystemInfo._system_command_call(['git'] + hash_args, env=env, cwd=arc_root).rstrip() + author = _SystemInfo._system_command_call(['git'] + author_args, env=env, cwd=arc_root) + commit = _SystemInfo._system_command_call(['git'] + hash_args, env=env, cwd=arc_root).rstrip() + author = _SystemInfo._system_command_call(['git'] + author_args, env=env, cwd=arc_root) + summary = _SystemInfo._system_command_call(['git'] + summary_args, env=env, cwd=arc_root) + svn_id = _SystemInfo._system_command_call(['git'] + svn_args, env=env, cwd=arc_root) + if not svn_id: + svn_id = _SystemInfo._system_command_call(['git'] + svn_args_alt, env=env, cwd=arc_root) + + try: + tag_info = _SystemInfo._system_command_call(['git'] + tag_args, env=env, cwd=arc_root).splitlines() + except Exception: + tag_info = [''.encode('utf-8')] + + try: + branch_info = _SystemInfo._system_command_call(['git'] + branch_args, env=env, cwd=arc_root).splitlines() + except Exception: + branch_info = [''.encode('utf-8')] + + depth = six_.text_type(_GitVersion._get_git_depth(env, arc_root)).encode('utf-8') + + # logger.debug('Git info commit:{}, author:{}, summary:{}, svn_id:{}'.format(commit, author, summary, svn_id)) + return [commit, author, summary, svn_id, tag_info[0], branch_info[0], depth] + + # YT's patch number. + @staticmethod + def _get_git_depth(env, arc_root): + graph = {} + full_history_args = ["log", "--full-history", "--format=%H %P", "HEAD"] + history = _SystemInfo._system_command_call(['git'] + full_history_args, env=env, cwd=arc_root).decode('utf-8') + + head = None + for line in history.splitlines(): + values = line.split() + if values: + if head is None: + head = values[0] + graph[values[0]] = values[1:] + + assert head + cache = {} + stack = [(head, None, False)] + while stack: + commit, child, calculated = stack.pop() + if commit in cache: + calculated = True + if calculated: + if child is not None: + cache[child] = max(cache.get(child, 0), cache[commit] + 1) + else: + stack.append((commit, child, True)) + parents = graph[commit] + if not parents: + cache[commit] = 0 + else: + for parent in parents: + stack.append((parent, commit, False)) + return cache[head] + + +class _SystemInfo: + LOCALE_LIST = _get_user_locale() + [sys.getfilesystemencoding(), 'utf-8'] + + @classmethod + def get_locale(cls): + import codecs + for i in cls.LOCALE_LIST: + if not i: + continue + try: + codecs.lookup(i) + return i + except LookupError: + continue + + @staticmethod + def _to_text(s): + if isinstance(s, six_.binary_type): + return s.decode(_SystemInfo.get_locale(), errors='replace') + return s + + @staticmethod + def get_user(): + sys_user = os.environ.get("USER") + if not sys_user: + sys_user = os.environ.get("USERNAME") + if not sys_user: + sys_user = os.environ.get("LOGNAME") + if not sys_user: + sys_user = "Unknown user" + return sys_user + + @staticmethod + def get_date(stamp=None): + # Format compatible with SVN-xml format. 
+ return time.strftime("%Y-%m-%dT%H:%M:%S.000000Z", time.gmtime(stamp)) + + @staticmethod + def get_timestamp(): + # Unix timestamp. + return int(time.time()) + + @staticmethod + def get_other_data(src_dir, data_file='local.ymake'): + other_data = "Other info:\n" + other_data += INDENT + "Build by: " + _SystemInfo.get_user() + "\n" + other_data += INDENT + "Top src dir: {}\n".format(src_dir) + + # logger.debug("Other data: %s", other_data) + + return other_data + + @staticmethod + def _get_host_info(fake_build_info=False): + if fake_build_info: + host_info = '*sys localhost 1.0.0 #dummy information ' + elif not on_win(): + host_info = ' '.join(os.uname()) + else: + host_info = _SystemInfo._system_command_call("VER") # XXX: check shell from cygwin to call VER this way! + return INDENT + INDENT + host_info.strip() + "\n" if host_info else "" + + @staticmethod + def _system_command_call(command, **kwargs): + if isinstance(command, list): + command = subprocess.list2cmdline(command) + try: + process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, **kwargs) + stdout, stderr = process.communicate() + if process.returncode != 0: + # logger.debug('{}\nRunning {} failed with exit code {}\n'.format(stderr, command, process.returncode)) + raise get_svn_exception()(stdout=stdout, stderr=stderr, rc=process.returncode, cmd=[command]) + return stdout + except OSError as e: + msg = e.strerror + errcodes = 'error {}'.format(e.errno) + if on_win() and isinstance(e, WindowsError): + errcodes += ', win-error {}'.format(e.winerror) + try: + import ctypes + msg = six_.text_type(ctypes.FormatError(e.winerror), _SystemInfo.get_locale()).encode('utf-8') + except ImportError: + pass + # logger.debug('System command call {} failed [{}]: {}\n'.format(command, errcodes, msg)) + return None + + +def _get_raw_data(vcs_type, vcs_root): + lines = [] + if vcs_type == 'git': + lines = _GitVersion.external_data(vcs_root) + + return [l.decode('utf-8') for l in lines] + + +def _get_json(vcs_root): + try: + vcs_type = "git" + info = _get_vcs_dictionary(vcs_type, *_get_raw_data(vcs_type, vcs_root)) + return info, vcs_root + except Exception: + return None, "" + + +def _dump_json( + arc_root, info, + other_data=None, + build_user=None, + build_date=None, + build_timestamp=0, + custom_version='', +): + j = {} + j['PROGRAM_VERSION'] = info['scm_text'] + "\n" + _SystemInfo._to_text(other_data) + j['CUSTOM_VERSION'] = str(_SystemInfo._to_text(custom_version)) + j['SCM_DATA'] = info['scm_text'] + j['ARCADIA_SOURCE_PATH'] = _SystemInfo._to_text(arc_root) + j['ARCADIA_SOURCE_URL'] = info.get('url', info.get('svn_url', '')) + j['ARCADIA_SOURCE_REVISION'] = info.get('revision', -1) + j['ARCADIA_SOURCE_HG_HASH'] = info.get('hash', '') + j['ARCADIA_SOURCE_LAST_CHANGE'] = info.get('commit_revision', info.get('svn_commit_revision', -1)) + j['ARCADIA_SOURCE_LAST_AUTHOR'] = info.get('commit_author', '') + j['ARCADIA_PATCH_NUMBER'] = info.get('patch_number', 0) + j['BUILD_USER'] = _SystemInfo._to_text(build_user) + j['VCS'] = info.get('vcs', '') + j['BRANCH'] = info.get('branch', '') + j['ARCADIA_TAG'] = info.get('tag', '') + j['DIRTY'] = info.get('dirty', '') + + if 'url' in info or 'svn_url' in info: + j['SVN_REVISION'] = info.get('svn_commit_revision', info.get('revision', -1)) + j['SVN_ARCROOT'] = info.get('url', info.get('svn_url', '')) + j['SVN_TIME'] = info.get('commit_date', info.get('svn_commit_date', '')) + + j['BUILD_DATE'] = build_date + j['BUILD_TIMESTAMP'] = build_timestamp + + return 
json.dumps(j, sort_keys=True, indent=4, separators=(',', ': ')) + + +def get_version_info(arc_root, custom_version=""): + info, vcs_root = _get_json(arc_root) + if info is None: + return "" + + return _dump_json( + vcs_root, + info, + other_data=_SystemInfo.get_other_data( + src_dir=vcs_root, + ), + build_user=_SystemInfo.get_user(), + build_date=_SystemInfo.get_date(None), + build_timestamp=_SystemInfo.get_timestamp(), + custom_version=custom_version, + ) + + +if __name__ == '__main__': + with open(sys.argv[1], 'w') as f: + f.write(get_version_info(sys.argv[2])) + diff --git a/build/export_generators/hardcoded-cmake/scripts/split_unittest.py b/build/export_generators/hardcoded-cmake/scripts/split_unittest.py new file mode 100644 index 0000000000..8874b8b915 --- /dev/null +++ b/build/export_generators/hardcoded-cmake/scripts/split_unittest.py @@ -0,0 +1,80 @@ +import argparse +import tempfile +import shlex +import subprocess + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--split-factor", type=int, default=0) + parser.add_argument("--shard", type=int, default=0) + parser.add_argument("--fork-mode", type=str, default="SEQUENTIAL") + parser.add_argument("command", nargs=argparse.REMAINDER) + return parser.parse_args() + + +def get_sequential_chunk(tests, modulo, modulo_index): + chunk_size = len(tests) // modulo + not_used = len(tests) % modulo + shift = chunk_size + (modulo_index < not_used) + start = chunk_size * modulo_index + min(modulo_index, not_used) + end = start + shift + return [] if end > len(tests) else tests[start:end] + + +def get_shuffled_chunk(tests, modulo, modulo_index): + result_tests = [] + for i, test in enumerate(tests): + if i % modulo == modulo_index: + result_tests.append(test) + return result_tests + + +def list_tests(binary): + with tempfile.NamedTemporaryFile() as tmpfile: + cmd = [binary, "--list-verbose", "--list-path", tmpfile.name] + subprocess.check_call(cmd) + + with open(tmpfile.name) as afile: + lines = afile.read().strip().split("\n") + lines = [x.strip() for x in lines] + return [x for x in lines if x] + + +def get_shard_tests(args): + test_names = list_tests(args.command[0]) + test_names = sorted(test_names) + + if args.fork_mode == "MODULO": + return get_shuffled_chunk(test_names, args.split_factor, args.shard) + elif args.fork_mode == "SEQUENTIAL": + return get_sequential_chunk(test_names, args.split_factor, args.shard) + else: + raise ValueError("detected unknown partition mode: {}".format(args.fork_mode)) + + +def get_shard_cmd_args(args): + return ["+{}".format(x) for x in get_shard_tests(args)] + + +def main(): + args = parse_args() + + if args.split_factor: + shard_cmd = get_shard_cmd_args(args) + if shard_cmd: + cmd = args.command + shard_cmd + else: + print("No tests for {} shard".format(args.shard)) + return 0 + else: + cmd = args.command + + rc = subprocess.call(cmd) + if rc: + print("Some tests failed. To reproduce run: {}".format(shlex.join(cmd))) + return rc + + +if __name__ == "__main__": + exit(main()) |
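
The Gradle generator added above is driven by plain Jinja templates. As a rough, hypothetical sketch (not part of this commit, and not how yexport itself renders them), a template such as settings.gradle.kts.jinja could be rendered standalone with the Python jinja2 library; the context keys projectName and subdirs mirror the variables the template references, while the loader path and the example values are invented for illustration:

    # Hypothetical sketch: render settings.gradle.kts.jinja with jinja2, outside of yexport.
    # The variable names (projectName, subdirs) come from the template; the values are made up.
    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader("build/export_generators/gradle"))
    template = env.get_template("settings.gradle.kts.jinja")
    rendered = template.render(projectName="ydb", subdirs=["util", "library/cpp/json"])
    print(rendered)  # rootProject.name = "ydb", followed by one include("...") per subdir

The same idea applies to build.gradle.kts.jinja and build.gradle.kts.proto.jinja, which additionally expect the per-target attributes declared in generator.toml (app_main_class, publish, consumer_classpath, and so on) in their render context.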