diff --git a/README.md b/README.md index bed321f2..e0edac42 100644 --- a/README.md +++ b/README.md @@ -136,7 +136,7 @@ sudo apt install lcov Go to the directory that contains the `MODULE.bazel` file and execute: ```shell -./coverage.sh buchgr_remote_cache +./coverage.sh --additonal_bazel_config=buchgr_remote_cache open coverage_report/index.html ``` diff --git a/devertexwahn/.bazelrc b/devertexwahn/.bazelrc index 0b23fbf2..02ce81b3 100644 --- a/devertexwahn/.bazelrc +++ b/devertexwahn/.bazelrc @@ -159,8 +159,14 @@ build:clang-tidy --cxxopt=-std=c++17 build:optimized build --copt=-O3 # IWYU -build:iwyu --aspects @com_github_storypku_bazel_iwyu//bazel/iwyu:iwyu.bzl%iwyu_aspect +# More details here: https://github.com/storypku/bazel_iwyu/pull/12 +#build:iwyu --@bazel_iwyu//:iwyu_mappings=@my_repo//tools:iwyu_mappings +build:iwyu --config=gcc11 +#build:iwyu --config=clang16 +build:iwyu --aspects @bazel_iwyu//bazel/iwyu:iwyu.bzl%iwyu_aspect build:iwyu --output_groups=report +build:iwyu --build_tag_filters=-no-iwyu # To include everything except the rules tagged with "no-iwyu" +build:iwyu --@bazel_iwyu//:iwyu_opts=--verbose=3,--no_fwd_decls,--cxx17ns,--max_line_length=127 try-import %workspace%/.bazelrc.remote_cache try-import %workspace%/.bazelrc.build_meta diff --git a/devertexwahn/.bazelversion b/devertexwahn/.bazelversion index b26a34e4..15020207 100644 --- a/devertexwahn/.bazelversion +++ b/devertexwahn/.bazelversion @@ -1 +1 @@ -7.2.1 +7.3.0 diff --git a/devertexwahn/ci/macOS-12-apple-clang14.yaml b/devertexwahn/ci/macOS-12-apple-clang14.yaml index 7232f25d..cdcf7eb2 100644 --- a/devertexwahn/ci/macOS-12-apple-clang14.yaml +++ b/devertexwahn/ci/macOS-12-apple-clang14.yaml @@ -6,12 +6,20 @@ # Apple Clang 14 on macOS 12 # Make sure we can build with Apple Clang13 in all different compilation modes steps: + - script: | + df -h + displayName: 'Show disk space' + - checkout: self clean: true fetchDepth: 1 lfs: false submodules: false + - script: | + df -h + displayName: 
'Show disk space' + - script: | sw_vers uname diff --git a/devertexwahn/ci/macOS-12-clang14.yaml b/devertexwahn/ci/macOS-12-clang14.yaml index 51858dd9..42c40df7 100644 --- a/devertexwahn/ci/macOS-12-clang14.yaml +++ b/devertexwahn/ci/macOS-12-clang14.yaml @@ -6,12 +6,20 @@ # Apple Clang 13 on macOS 12 # Make sure we can build with Apple Clang13 in all different compilation modes steps: + - script: | + df -h + displayName: 'Show disk space' + - checkout: self clean: true fetchDepth: 1 lfs: false submodules: false + - script: | + df -h + displayName: 'Show disk space' + - script: | sw_vers diff --git a/devertexwahn/ci/macOS-13-apple-clang14.yaml b/devertexwahn/ci/macOS-13-apple-clang14.yaml index c64140a3..a07ba0a0 100644 --- a/devertexwahn/ci/macOS-13-apple-clang14.yaml +++ b/devertexwahn/ci/macOS-13-apple-clang14.yaml @@ -6,12 +6,20 @@ # Apple Clang 14 on macOS 13 # Make sure we can build with Apple Clang13 in all different compilation modes steps: + - script: | + df -h + displayName: 'Show disk space' + - checkout: self clean: true fetchDepth: 1 lfs: false submodules: false + - script: | + df -h + displayName: 'Show disk space' + - script: | sw_vers uname diff --git a/devertexwahn/ci/macOS-14-apple-clang14.yaml b/devertexwahn/ci/macOS-14-apple-clang14.yaml index 9c65c9d8..a27f932e 100644 --- a/devertexwahn/ci/macOS-14-apple-clang14.yaml +++ b/devertexwahn/ci/macOS-14-apple-clang14.yaml @@ -6,12 +6,20 @@ # Apple Clang 14 on macOS 14 # Make sure we can build with Apple Clang13 in all different compilation modes steps: + - script: | + df -h + displayName: 'Show disk space' + - checkout: self clean: true fetchDepth: 1 lfs: false submodules: false + - script: | + df -h + displayName: 'Show disk space' + - script: | sw_vers uname diff --git a/devertexwahn/ci/ubuntu-20.04-clang14.yaml b/devertexwahn/ci/ubuntu-20.04-clang14.yaml index c535e429..fff5d038 100644 --- a/devertexwahn/ci/ubuntu-20.04-clang14.yaml +++ b/devertexwahn/ci/ubuntu-20.04-clang14.yaml @@ -17,6 +17,7 
@@ steps: submodules: false + - script: | lsb_release -a displayName: 'Show OS version' diff --git a/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part1.yaml b/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part1.yaml index bc56c234..03f02c81 100644 --- a/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part1.yaml +++ b/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part1.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: 'Show OS version' diff --git a/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part2.yaml b/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part2.yaml index 1892366a..e37179b3 100644 --- a/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part2.yaml +++ b/devertexwahn/ci/ubuntu-20.04-gcc9-dbg-part2.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: 'Show OS version' diff --git a/devertexwahn/ci/ubuntu-20.04-gcc9-fast.yaml b/devertexwahn/ci/ubuntu-20.04-gcc9-fast.yaml index 57f7046c..593b3666 100644 --- a/devertexwahn/ci/ubuntu-20.04-gcc9-fast.yaml +++ b/devertexwahn/ci/ubuntu-20.04-gcc9-fast.yaml @@ -8,7 +8,7 @@ steps: - script: | - df -H + df -h displayName: 'Show disk space' - checkout: self @@ -18,12 +18,13 @@ steps: submodules: false + - script: | lsb_release -a displayName: 'Show OS version' - script: | - df -H + df -h displayName: 'Show disk space' - script: | @@ -37,7 +38,7 @@ steps: - script: | - df -H + df -h displayName: 'Show disk space' - script: | @@ -46,7 +47,7 @@ steps: displayName: "Show compiler information" - script: | - df -H + df -h displayName: 'Show disk space' - script: | @@ -55,7 +56,7 @@ steps: displayName: 'Bazel build gcc9-fastbuild' - script: | - df -H + df -h displayName: 'Show disk space' - script: | diff --git a/devertexwahn/ci/ubuntu-20.04-gcc9-opt.yaml b/devertexwahn/ci/ubuntu-20.04-gcc9-opt.yaml index 66ffe3aa..c1c26c31 100644 --- a/devertexwahn/ci/ubuntu-20.04-gcc9-opt.yaml +++ b/devertexwahn/ci/ubuntu-20.04-gcc9-opt.yaml @@ -18,12 +18,13 @@ steps: submodules: false + - script: | lsb_release -a 
displayName: 'Show OS version' - script: | - df -H + df -h displayName: 'Show disk space' - script: | @@ -37,7 +38,7 @@ steps: - script: | - df -H + df -h displayName: 'Show disk space' - script: | @@ -46,7 +47,7 @@ steps: displayName: "Show compiler information" - script: | - df -H + df -h displayName: 'Show disk space' # Qt compilation does not work: "You must build your code with position independent code if Qt was built with -reduce-relocations. " "Compile your code with -fPIC (and not with -fPIE)." diff --git a/devertexwahn/ci/ubuntu-22.04-bazel-query.yaml b/devertexwahn/ci/ubuntu-22.04-bazel-query.yaml index cd4967b8..015f8b94 100644 --- a/devertexwahn/ci/ubuntu-22.04-bazel-query.yaml +++ b/devertexwahn/ci/ubuntu-22.04-bazel-query.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: "Show OS version" diff --git a/devertexwahn/ci/ubuntu-22.04-clang16.yaml b/devertexwahn/ci/ubuntu-22.04-clang16.yaml index 0ca418cd..079b069b 100644 --- a/devertexwahn/ci/ubuntu-22.04-clang16.yaml +++ b/devertexwahn/ci/ubuntu-22.04-clang16.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: "Show OS version" diff --git a/devertexwahn/ci/ubuntu-22.04-coverage.yaml b/devertexwahn/ci/ubuntu-22.04-coverage.yaml index d2737263..fa1b1e01 100644 --- a/devertexwahn/ci/ubuntu-22.04-coverage.yaml +++ b/devertexwahn/ci/ubuntu-22.04-coverage.yaml @@ -15,6 +15,7 @@ steps: fetchDepth: 1 lfs: false submodules: false + - script: | diff --git a/devertexwahn/ci/ubuntu-22.04-cppcheck.yaml b/devertexwahn/ci/ubuntu-22.04-cppcheck.yaml index 4bcf9fea..176a40cb 100644 --- a/devertexwahn/ci/ubuntu-22.04-cppcheck.yaml +++ b/devertexwahn/ci/ubuntu-22.04-cppcheck.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: "Show OS version" @@ -33,9 +34,9 @@ steps: # displayName: "Install Cppcheck" - script: | - curl -L -o cppcheck.tar.gz https://github.com/danmar/cppcheck/archive/refs/tags/2.14.1.tar.gz 
+ curl -L -o cppcheck.tar.gz https://github.com/danmar/cppcheck/archive/refs/tags/2.14.2.tar.gz tar xzf cppcheck.tar.gz - cd cppcheck-2.14.1 + cd cppcheck-2.14.2 cmake -S . -B build cmake --build build sudo cmake --install build diff --git a/devertexwahn/ci/ubuntu-22.04-gcc11-bzlmod.yaml b/devertexwahn/ci/ubuntu-22.04-gcc11-bzlmod.yaml index 449014fc..dbb379b0 100644 --- a/devertexwahn/ci/ubuntu-22.04-gcc11-bzlmod.yaml +++ b/devertexwahn/ci/ubuntu-22.04-gcc11-bzlmod.yaml @@ -17,6 +17,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: 'Show OS version' diff --git a/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part1.yaml b/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part1.yaml index 0e16003e..f7e8c010 100644 --- a/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part1.yaml +++ b/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part1.yaml @@ -16,6 +16,7 @@ steps: fetchDepth: 1 lfs: false submodules: false + - script: | diff --git a/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part2.yaml b/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part2.yaml index 68fd23c6..ba5c4a7c 100644 --- a/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part2.yaml +++ b/devertexwahn/ci/ubuntu-22.04-gcc11-dbg-part2.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: "Show OS version" diff --git a/devertexwahn/ci/ubuntu-22.04-gcc11-fast-opt.yaml b/devertexwahn/ci/ubuntu-22.04-gcc11-fast-opt.yaml index 30d9957d..b105afca 100644 --- a/devertexwahn/ci/ubuntu-22.04-gcc11-fast-opt.yaml +++ b/devertexwahn/ci/ubuntu-22.04-gcc11-fast-opt.yaml @@ -18,6 +18,7 @@ steps: submodules: false + - script: | lsb_release -a displayName: "Show OS version" diff --git a/devertexwahn/coverage.sh b/devertexwahn/coverage.sh index e2a432c6..cc44426d 100755 --- a/devertexwahn/coverage.sh +++ b/devertexwahn/coverage.sh @@ -7,19 +7,18 @@ set -euo pipefail -# Check arguments -if [ "$#" -ge 2 ]; then - echo "To many arguments detected. 
You can provide either one or no argument" - echo "Usage: $0 " - exit 1 -fi - +show_html_page=false additional_bazel_config="" -# Provide a additional bazel config (bazel remote cache config expected) if provided -if [ "$#" -eq 1 ]; then - additional_bazel_config="--config=$1" -fi +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + --show_report|-s) show_html_page=true ;; + --additonal_bazel_config=*) additional_bazel_config="--config=${1#*=}" ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done bazel coverage \ --config=gcc11 \ @@ -39,10 +38,13 @@ echo "Current coverage: $current_line_coverage" old_line_coverage="95.0" echo "Old coverage: $old_line_coverage" +# Open coverage report if flag is set +if [ "$show_html_page" = true ]; then + open coverage_report/index.html +fi + # if old coverage is higher than current coverage, fail if (( $(echo "$current_line_coverage < $old_line_coverage"|bc -l) )); then echo "Line coverage is lower than expected" exit 1 fi - -#open coverage_report/index.html diff --git a/devertexwahn/flatland/rendering/scene/load_scene.cpp b/devertexwahn/flatland/rendering/scene/load_scene.cpp index bdb379d3..7e53cdf7 100644 --- a/devertexwahn/flatland/rendering/scene/load_scene.cpp +++ b/devertexwahn/flatland/rendering/scene/load_scene.cpp @@ -161,6 +161,11 @@ Point3f convert_csv_to_point3f(const std::string& csv) { Color3f convert_csv_to_color3f(const std::string& csv) { std::vector values = convert_to_float_vector(csv); + + if(values.size() == 1) { + return Color3f{values[0]}; + } + assert(values.size() == 3); return Color3f{values[0], values[1], values[2]}; } diff --git a/devertexwahn/imaging/io/io.cpp b/devertexwahn/imaging/io/io.cpp index 41233196..d807ccc7 100644 --- a/devertexwahn/imaging/io/io.cpp +++ b/devertexwahn/imaging/io/io.cpp @@ -102,6 +102,11 @@ Image3f load_image(std::string_view filename) { return image; } + if(boost::ends_with(filename, ".jpg")) { + Image3f image = 
load_image_jpeg(filename.data()); + return image; + } + if (boost::ends_with(filename, ".png")) { ReferenceCounted tmp_image = load_image_png(filename); diff --git a/devertexwahn/imaging/io/io_jpeg.cpp b/devertexwahn/imaging/io/io_jpeg.cpp index 888afc25..cf677651 100644 --- a/devertexwahn/imaging/io/io_jpeg.cpp +++ b/devertexwahn/imaging/io/io_jpeg.cpp @@ -9,6 +9,45 @@ DE_VERTEXWAHN_BEGIN_NAMESPACE +Image3f load_image_jpeg(const char* filename) { + struct jpeg_decompress_struct cinfo; + struct jpeg_error_mgr jerr; + + FILE * infile; + JSAMPARRAY buffer; + int row_stride; + + if ((infile = fopen(filename, "rb")) == nullptr) { + fprintf(stderr, "can't open %s\n", filename); + exit(1); + } + + cinfo.err = jpeg_std_error(&jerr); + jpeg_create_decompress(&cinfo); + jpeg_stdio_src(&cinfo, infile); + (void) jpeg_read_header(&cinfo, TRUE); + (void) jpeg_start_decompress(&cinfo); + + row_stride = cinfo.output_width * cinfo.output_components; + buffer = (*cinfo.mem->alloc_sarray) + ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); + + Image3f image(cinfo.output_width, cinfo.output_height); + + while (cinfo.output_scanline < cinfo.output_height) { + (void) jpeg_read_scanlines(&cinfo, buffer, 1); + for(int x = 0; x < cinfo.output_width; ++x) { + image.set_pixel(x, cinfo.output_scanline - 1, Color3f(buffer[0][x * 3] / 255.f, buffer[0][x * 3 + 1] / 255.f, buffer[0][x * 3 + 2] / 255.f)); + } + } + + (void) jpeg_finish_decompress(&cinfo); + jpeg_destroy_decompress(&cinfo); + fclose(infile); + + return image; +} + bool store_jpeg(const char *filename, const Image4b &image) { int image_width = image.width(); int image_height = image.height(); diff --git a/devertexwahn/imaging/io/io_jpeg.h b/devertexwahn/imaging/io/io_jpeg.h index 23ee412d..9eea6084 100644 --- a/devertexwahn/imaging/io/io_jpeg.h +++ b/devertexwahn/imaging/io/io_jpeg.h @@ -16,6 +16,8 @@ bool store_jpeg(const char *filename, const Image4b &image); bool store_jpeg(const char *filename, const Image3f &image); 
+Image3f load_image_jpeg(const char* filename); + DE_VERTEXWAHN_END_NAMESPACE -#endif // end define De_Vertexwahn_Imaging_io_jpeg_f13dfab6_d49a_48d4_9c11_003af072c293_h \ No newline at end of file +#endif // end define De_Vertexwahn_Imaging_io_jpeg_f13dfab6_d49a_48d4_9c11_003af072c293_h diff --git a/devertexwahn/tools/compiler_information/main.cpp b/devertexwahn/tools/compiler_information/main.cpp index 509f4ef7..897f4edf 100644 --- a/devertexwahn/tools/compiler_information/main.cpp +++ b/devertexwahn/tools/compiler_information/main.cpp @@ -19,6 +19,7 @@ std::string semantic_versioning_string(int major, int minor, int patch) { // See https://dev.to/yumetodo/list-of-mscver-and-mscfullver-8nd std::string vs_product_name(int version) { std::map versions { + {1929, "Visual Studio 2019 16.11.2"}, {1933, "Visual Studio 2022 17.3.6"}, {1936, "Visual Studio 2022 17.6.2"}, {1937, "Visual Studio 2022 17.7.0"}, diff --git a/third_party/Catch2/.github/workflows/mac-builds.yml b/third_party/Catch2/.github/workflows/mac-builds.yml index 259d8b36..8f0e502a 100644 --- a/third_party/Catch2/.github/workflows/mac-builds.yml +++ b/third_party/Catch2/.github/workflows/mac-builds.yml @@ -4,11 +4,7 @@ on: [push, pull_request] jobs: build: - # macos-12 updated to a toolchain that crashes when linking the - # test binary. This seems to be a known bug in that version, - # and will eventually get fixed in an update. After that, we can go - # back to newer macos images. - runs-on: macos-11 + runs-on: macos-12 strategy: matrix: cxx: @@ -29,8 +25,6 @@ jobs: env: CXX: ${{matrix.cxx}} CXXFLAGS: ${{matrix.cxxflags}} - # Note: $GITHUB_WORKSPACE is distinct from ${{runner.workspace}}. 
- # This is important run: | cmake -Bbuild -H$GITHUB_WORKSPACE \ -DCMAKE_BUILD_TYPE=${{matrix.build_type}} \ diff --git a/third_party/Catch2/.gitignore b/third_party/Catch2/.gitignore index be955e6c..dbf9f40a 100644 --- a/third_party/Catch2/.gitignore +++ b/third_party/Catch2/.gitignore @@ -25,7 +25,7 @@ Build cmake-build-* benchmark-dir .conan/test_package/build -.conan/test_package/CMakeUserPresets.json +**/CMakeUserPresets.json bazel-* MODULE.bazel.lock build-fuzzers @@ -37,3 +37,4 @@ msvc-sln* docs/doxygen *.cache compile_commands.json +**/*.unapproved.txt diff --git a/third_party/Catch2/MODULE.bazel b/third_party/Catch2/MODULE.bazel index a7846cd6..4d7ec860 100644 --- a/third_party/Catch2/MODULE.bazel +++ b/third_party/Catch2/MODULE.bazel @@ -1,3 +1,3 @@ module(name = "catch2") -bazel_dep(name = "bazel_skylib", version = "1.5.0") +bazel_dep(name = "bazel_skylib", version = "1.7.1") diff --git a/third_party/Catch2/WORKSPACE.bazel b/third_party/Catch2/WORKSPACE.bazel index 357e6f94..e48080a4 100644 --- a/third_party/Catch2/WORKSPACE.bazel +++ b/third_party/Catch2/WORKSPACE.bazel @@ -4,10 +4,10 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") http_archive( name = "bazel_skylib", - sha256 = "cd55a062e763b9349921f0f5db8c3933288dc8ba4f76dd9416aac68acee3cb94", + sha256 = "bc283cdfcd526a52c3201279cda4bc298652efa898b10b4db0837dc51652756f", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz", - "https://github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz", + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.7.1/bazel-skylib-1.7.1.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.7.1/bazel-skylib-1.7.1.tar.gz", ], ) diff --git a/third_party/Catch2/docs/cmake-integration.md b/third_party/Catch2/docs/cmake-integration.md index a1bb4358..ad6ca004 100644 --- 
a/third_party/Catch2/docs/cmake-integration.md +++ b/third_party/Catch2/docs/cmake-integration.md @@ -8,6 +8,7 @@ [`CATCH_CONFIG_*` customization options in CMake](#catch_config_-customization-options-in-cmake)
[Installing Catch2 from git repository](#installing-catch2-from-git-repository)
[Installing Catch2 from vcpkg](#installing-catch2-from-vcpkg)
+[Installing Catch2 from Bazel](#installing-catch2-from-bazel)
Because we use CMake to build Catch2, we also provide a couple of integration points for our users. diff --git a/third_party/Catch2/docs/deprecations.md b/third_party/Catch2/docs/deprecations.md index 1fb79aaa..0b5bee13 100644 --- a/third_party/Catch2/docs/deprecations.md +++ b/third_party/Catch2/docs/deprecations.md @@ -35,6 +35,19 @@ being aborted (when using `--abort` or `--abortx`). It is however **NOT** invoked for test cases that are [explicitly skipped using the `SKIP` macro](skipping-passing-failing.md#top). + +### Non-const function for `TEST_CASE_METHOD` + +> Deprecated in Catch2 vX.Y.Z + +Currently, the member function generated for `TEST_CASE_METHOD` is +not `const` qualified. In the future, the generated member function will +be `const` qualified, just as `TEST_CASE_PERSISTENT_FIXTURE` does. + +If you are mutating the fixture instance from within the test case, and +want to keep doing so in the future, mark the mutated members as `mutable`. + + --- [Home](Readme.md#top) diff --git a/third_party/Catch2/docs/list-of-examples.md b/third_party/Catch2/docs/list-of-examples.md index a919408a..40d3f711 100644 --- a/third_party/Catch2/docs/list-of-examples.md +++ b/third_party/Catch2/docs/list-of-examples.md @@ -8,6 +8,7 @@ - Assertion: [REQUIRE, CHECK](../examples/030-Asn-Require-Check.cpp) - Fixture: [Sections](../examples/100-Fix-Section.cpp) - Fixture: [Class-based fixtures](../examples/110-Fix-ClassFixture.cpp) +- Fixture: [Persistent fixtures](../examples/111-Fix-PersistentFixture.cpp) - BDD: [SCENARIO, GIVEN, WHEN, THEN](../examples/120-Bdd-ScenarioGivenWhenThen.cpp) - Listener: [Listeners](../examples/210-Evt-EventListeners.cpp) - Configuration: [Provide your own output streams](../examples/231-Cfg-OutputStreams.cpp) diff --git a/third_party/Catch2/docs/matchers.md b/third_party/Catch2/docs/matchers.md index d5be1f5a..4b9445ae 100644 --- a/third_party/Catch2/docs/matchers.md +++ b/third_party/Catch2/docs/matchers.md @@ -210,15 +210,36 @@ The other 
miscellaneous matcher utility is exception matching. #### Matching exceptions -Catch2 provides a utility macro for asserting that an expression -throws exception of specific type, and that the exception has desired -properties. The macro is `REQUIRE_THROWS_MATCHES(expr, ExceptionType, Matcher)`. +Because exceptions are a bit special, Catch2 has a separate macro for them. + + +The basic form is + +``` +REQUIRE_THROWS_MATCHES(expr, ExceptionType, Matcher) +``` + +and it checks that the `expr` throws an exception, that exception is derived +from the `ExceptionType` type, and then `Matcher::match` is called on +the caught exception. > `REQUIRE_THROWS_MATCHES` macro lives in `catch2/matchers/catch_matchers.hpp` +For one-off checks you can use the `Predicate` matcher above, e.g. -Catch2 currently provides two matchers for exceptions. -These are: +```cpp +REQUIRE_THROWS_MATCHES(parse(...), + parse_error, + Predicate([] (parse_error const& err) -> bool { return err.line() == 1; }) +); +``` + +but if you intend to thoroughly test your error reporting, I recommend +defining a specialized matcher. + + +Catch2 also provides 2 built-in matchers for checking the error message +inside an exception (it must be derived from `std::exception`): * `Message(std::string message)`. * `MessageMatches(Matcher matcher)`. @@ -236,10 +257,7 @@ REQUIRE_THROWS_MATCHES(throwsDerivedException(), DerivedException, Message("De REQUIRE_THROWS_MATCHES(throwsDerivedException(), DerivedException, MessageMatches(StartsWith("DerivedException"))); ``` -Note that `DerivedException` in the example above has to derive from -`std::exception` for the example to work. 
- -> the exception message matcher lives in `catch2/matchers/catch_matchers_exception.hpp` +> the exception message matchers live in `catch2/matchers/catch_matchers_exception.hpp` ### Generic range Matchers diff --git a/third_party/Catch2/docs/other-macros.md b/third_party/Catch2/docs/other-macros.md index 24a0fb6e..79990a6a 100644 --- a/third_party/Catch2/docs/other-macros.md +++ b/third_party/Catch2/docs/other-macros.md @@ -93,30 +93,6 @@ TEST_CASE("STATIC_CHECK showcase", "[traits]") { ## Test case related macros -* `METHOD_AS_TEST_CASE` - -`METHOD_AS_TEST_CASE( member-function-pointer, description )` lets you -register a member function of a class as a Catch2 test case. The class -will be separately instantiated for each method registered in this way. - -```cpp -class TestClass { - std::string s; - -public: - TestClass() - :s( "hello" ) - {} - - void testCase() { - REQUIRE( s == "hello" ); - } -}; - - -METHOD_AS_TEST_CASE( TestClass::testCase, "Use class's method as a test case", "[class]" ) -``` - * `REGISTER_TEST_CASE` `REGISTER_TEST_CASE( function, description )` let's you register diff --git a/third_party/Catch2/docs/test-fixtures.md b/third_party/Catch2/docs/test-fixtures.md index 9c9eaa18..6bc115e2 100644 --- a/third_party/Catch2/docs/test-fixtures.md +++ b/third_party/Catch2/docs/test-fixtures.md @@ -1,9 +1,30 @@ # Test fixtures -## Defining test fixtures +**Contents**
+[Non-Templated test fixtures](#non-templated-test-fixtures)
+[Templated test fixtures](#templated-test-fixtures)
+[Signature-based parameterised test fixtures](#signature-based-parameterised-test-fixtures)&#x20;
+[Template fixtures with types specified in template type lists](#template-fixtures-with-types-specified-in-template-type-lists)
-Although Catch allows you to group tests together as [sections within a test case](test-cases-and-sections.md), it can still be convenient, sometimes, to group them using a more traditional test fixture. Catch fully supports this too. You define the test fixture as a simple structure: +## Non-Templated test fixtures + +Although Catch2 allows you to group tests together as +[sections within a test case](test-cases-and-sections.md), it can still +be convenient, sometimes, to group them using a more traditional test. +Catch2 fully supports this too with 3 different macros for +non-templated test fixtures. They are: + +| Macro | Description | +|----------|-------------| +|1. `TEST_CASE_METHOD(className, ...)`| Creates a uniquely named class which inherits from the class specified by `className`. The test function will be a member of this derived class. An instance of the derived class will be created for every partial run of the test case. | +|2. `METHOD_AS_TEST_CASE(member-function, ...)`| Uses `member-function` as the test function. An instance of the class will be created for each partial run of the test case. | +|3. `TEST_CASE_PERSISTENT_FIXTURE(className, ...)`| Creates a uniquely named class which inherits from the class specified by `className`. The test function will be a member of this derived class. An instance of the derived class will be created at the start of the test run. That instance will be destroyed once the entire test case has ended. | + +### 1. `TEST_CASE_METHOD` + + +You define a `TEST_CASE_METHOD` test fixture as a simple structure: ```c++ class UniqueTestsFixture { @@ -30,8 +51,116 @@ class UniqueTestsFixture { } ``` -The two test cases here will create uniquely-named derived classes of UniqueTestsFixture and thus can access the `getID()` protected method and `conn` member variables. 
This ensures that both the test cases are able to create a DBConnection using the same method (DRY principle) and that any ID's created are unique such that the order that tests are executed does not matter. +The two test cases here will create uniquely-named derived classes of +UniqueTestsFixture and thus can access the `getID()` protected method +and `conn` member variables. This ensures that both the test cases +are able to create a DBConnection using the same method +(DRY principle) and that any ID's created are unique such that the +order that tests are executed does not matter. + +### 2. `METHOD_AS_TEST_CASE` + +`METHOD_AS_TEST_CASE` lets you register a member function of a class +as a Catch2 test case. The class will be separately instantiated +for each method registered in this way. + +```cpp +class TestClass { + std::string s; + +public: + TestClass() + :s( "hello" ) + {} + + void testCase() { + REQUIRE( s == "hello" ); + } +}; + + +METHOD_AS_TEST_CASE( TestClass::testCase, "Use class's method as a test case", "[class]" ) +``` + +This type of fixture is similar to [TEST_CASE_METHOD](#1-test_case_method) except in this +case it will directly use the provided class to create an object rather than a derived +class. + +### 3. `TEST_CASE_PERSISTENT_FIXTURE` + +> [Introduced](https://github.com/catchorg/Catch2/pull/2885) in Catch2 X.Y.Z + +`TEST_CASE_PERSISTENT_FIXTURE` behaves in the same way as +[TEST_CASE_METHOD](#1-test_case_method) except that there will only be +one instance created throughout the entire run of a test case. 
To +demonstrate this have a look at the following example: + +```cpp +class ClassWithExpensiveSetup { +public: + ClassWithExpensiveSetup() { + // expensive construction + std::this_thread::sleep_for( std::chrono::seconds( 2 ) ); + } + + ~ClassWithExpensiveSetup() noexcept { + // expensive destruction + std::this_thread::sleep_for( std::chrono::seconds( 1 ) ); + } + + int getInt() const { return 42; } +}; + +struct MyFixture { + mutable int myInt = 0; + ClassWithExpensiveSetup expensive; +}; + +TEST_CASE_PERSISTENT_FIXTURE( MyFixture, "Tests with MyFixture" ) { + + const int val = myInt++; + + SECTION( "First partial run" ) { + const auto otherValue = expensive.getInt(); + REQUIRE( val == 0 ); + REQUIRE( otherValue == 42 ); + } + + SECTION( "Second partial run" ) { REQUIRE( val == 1 ); } +} +``` + +This example demonstates two possible use-cases of this fixture type: +1. Improve test run times by reducing the amount of expensive and +redundant setup and tear-down required. +2. Reusing results from the previous partial run, in the current +partial run. + +This test case will be executed twice as there are two leaf sections. +On the first run `val` will be `0` and on the second run `val` will be +`1`. This demonstrates that we were able to use the results of the +previous partial run in subsequent partial runs. + +Additionally, we are simulating an expensive object using +`std::this_thread::sleep_for`, but real world use-cases could be: +1. Creating a D3D12/Vulkan device +2. Connecting to a database +3. Loading a file. + +The fixture object (`MyFixture`) will be constructed just before the +test case begins, and it will be destroyed just after the test case +ends. Therefore, this expensive object will only be created and +destroyed once during the execution of this test case. If we had used +`TEST_CASE_METHOD`, `MyFixture` would have been created and destroyed +twice during the execution of this test case. 
+ +NOTE: The member function which runs the test case is `const`. Therefore +if you want to mutate any member of the fixture it must be marked as +`mutable` as shown in this example. This is to make it clear that +the initial state of the fixture is intended to mutate during the +execution of the test case. +## Templated test fixtures Catch2 also provides `TEMPLATE_TEST_CASE_METHOD` and `TEMPLATE_PRODUCT_TEST_CASE_METHOD` that can be used together @@ -93,7 +222,7 @@ _While there is an upper limit on the number of types you can specify in single `TEMPLATE_TEST_CASE_METHOD` or `TEMPLATE_PRODUCT_TEST_CASE_METHOD`, the limit is very high and should not be encountered in practice._ -## Signature-based parametrised test fixtures +## Signature-based parameterised test fixtures > [Introduced](https://github.com/catchorg/Catch2/issues/1609) in Catch2 2.8.0. diff --git a/third_party/Catch2/examples/111-Fix-PersistentFixture.cpp b/third_party/Catch2/examples/111-Fix-PersistentFixture.cpp new file mode 100644 index 00000000..2bef90ff --- /dev/null +++ b/third_party/Catch2/examples/111-Fix-PersistentFixture.cpp @@ -0,0 +1,74 @@ + +// Copyright Catch2 Authors +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt or copy at +// https://www.boost.org/LICENSE_1_0.txt) + +// SPDX-License-Identifier: BSL-1.0 + +// Fixture.cpp + +// Catch2 has three ways to express fixtures: +// - Sections +// - Traditional class-based fixtures that are created and destroyed on every +// partial run +// - Traditional class-based fixtures that are created at the start of a test +// case and destroyed at the end of a test case (this file) + +// main() provided by linkage to Catch2WithMain + +#include + +#include + +class ClassWithExpensiveSetup { +public: + ClassWithExpensiveSetup() { + // Imagine some really expensive set up here. + // e.g. 
+ // setting up a D3D12/Vulkan Device, + // connecting to a database, + // loading a file + // etc etc etc + std::this_thread::sleep_for( std::chrono::seconds( 2 ) ); + } + + ~ClassWithExpensiveSetup() noexcept { + // We can do any clean up of the expensive class in the destructor + // e.g. + // destroy D3D12/Vulkan Device, + // disconnecting from a database, + // release file handle + // etc etc etc + std::this_thread::sleep_for( std::chrono::seconds( 1 ) ); + } + + int getInt() const { return 42; } +}; + +struct MyFixture { + + // The test case member function is const. + // Therefore we need to mark any member of the fixture + // that needs to mutate as mutable. + mutable int myInt = 0; + ClassWithExpensiveSetup expensive; +}; + +// Only one object of type MyFixture will be instantiated for the run +// of this test case even though there are two leaf sections. +// This is useful if your test case requires an object that is +// expensive to create and could be reused for each partial run of the +// test case. 
+TEST_CASE_PERSISTENT_FIXTURE( MyFixture, "Tests with MyFixture" ) { + + const int val = myInt++; + + SECTION( "First partial run" ) { + const auto otherValue = expensive.getInt(); + REQUIRE( val == 0 ); + REQUIRE( otherValue == 42 ); + } + + SECTION( "Second partial run" ) { REQUIRE( val == 1 ); } +} \ No newline at end of file diff --git a/third_party/Catch2/examples/CMakeLists.txt b/third_party/Catch2/examples/CMakeLists.txt index 7e629e09..4647df1d 100644 --- a/third_party/Catch2/examples/CMakeLists.txt +++ b/third_party/Catch2/examples/CMakeLists.txt @@ -28,6 +28,7 @@ set( SOURCES_IDIOMATIC_EXAMPLES 030-Asn-Require-Check.cpp 100-Fix-Section.cpp 110-Fix-ClassFixture.cpp + 111-Fix-PersistentFixture.cpp 120-Bdd-ScenarioGivenWhenThen.cpp 210-Evt-EventListeners.cpp 232-Cfg-CustomMain.cpp diff --git a/third_party/Catch2/extras/Catch.cmake b/third_party/Catch2/extras/Catch.cmake index 8f30688c..e080665b 100644 --- a/third_party/Catch2/extras/Catch.cmake +++ b/third_party/Catch2/extras/Catch.cmake @@ -124,6 +124,13 @@ same as the Catch name; see also ``TEST_PREFIX`` and ``TEST_SUFFIX``. test executable and when the tests are executed themselves. This requires cmake/ctest >= 3.22. + ``DL_FRAMEWORK_PATHS path...`` + Specifies paths that need to be set for the dynamic linker to find libraries + packaged as frameworks on Apple platforms when running the test executable + (DYLD_FRAMEWORK_PATH). These paths will both be set when retrieving the list + of test cases from the test executable and when the tests are executed themselves. + This requires cmake/ctest >= 3.22. + `DISCOVERY_MODE mode`` Provides control over when ``catch_discover_tests`` performs test discovery. 
By default, ``POST_BUILD`` sets up a post-build command to perform test discovery @@ -146,7 +153,7 @@ function(catch_discover_tests TARGET) "" "" "TEST_PREFIX;TEST_SUFFIX;WORKING_DIRECTORY;TEST_LIST;REPORTER;OUTPUT_DIR;OUTPUT_PREFIX;OUTPUT_SUFFIX;DISCOVERY_MODE" - "TEST_SPEC;EXTRA_ARGS;PROPERTIES;DL_PATHS" + "TEST_SPEC;EXTRA_ARGS;PROPERTIES;DL_PATHS;DL_FRAMEWORK_PATHS" ${ARGN} ) @@ -156,10 +163,11 @@ function(catch_discover_tests TARGET) if(NOT _TEST_LIST) set(_TEST_LIST ${TARGET}_TESTS) endif() - if (_DL_PATHS) - if(${CMAKE_VERSION} VERSION_LESS "3.22.0") - message(FATAL_ERROR "The DL_PATHS option requires at least cmake 3.22") - endif() + if(_DL_PATHS AND ${CMAKE_VERSION} VERSION_LESS "3.22.0") + message(FATAL_ERROR "The DL_PATHS option requires at least cmake 3.22") + endif() + if(_DL_FRAMEWORK_PATHS AND ${CMAKE_VERSION} VERSION_LESS "3.22.0") + message(FATAL_ERROR "The DL_FRAMEWORK_PATHS option requires at least cmake 3.22") endif() if(NOT _DISCOVERY_MODE) if(NOT CMAKE_CATCH_DISCOVER_TESTS_DISCOVERY_MODE) @@ -205,6 +213,7 @@ function(catch_discover_tests TARGET) -D "TEST_OUTPUT_PREFIX=${_OUTPUT_PREFIX}" -D "TEST_OUTPUT_SUFFIX=${_OUTPUT_SUFFIX}" -D "TEST_DL_PATHS=${_DL_PATHS}" + -D "TEST_DL_FRAMEWORK_PATHS=${_DL_FRAMEWORK_PATHS}" -D "CTEST_FILE=${ctest_tests_file}" -P "${_CATCH_DISCOVER_TESTS_SCRIPT}" VERBATIM @@ -250,6 +259,7 @@ function(catch_discover_tests TARGET) " TEST_OUTPUT_SUFFIX" " [==[" "${_OUTPUT_SUFFIX}" "]==]" "\n" " CTEST_FILE" " [==[" "${ctest_tests_file}" "]==]" "\n" " TEST_DL_PATHS" " [==[" "${_DL_PATHS}" "]==]" "\n" + " TEST_DL_FRAMEWORK_PATHS" " [==[" "${_DL_FRAMEWORK_PATHS}" "]==]" "\n" " CTEST_FILE" " [==[" "${CTEST_FILE}" "]==]" "\n" " )" "\n" " endif()" "\n" diff --git a/third_party/Catch2/extras/CatchAddTests.cmake b/third_party/Catch2/extras/CatchAddTests.cmake index a0731941..604e097a 100644 --- a/third_party/Catch2/extras/CatchAddTests.cmake +++ b/third_party/Catch2/extras/CatchAddTests.cmake @@ -22,7 +22,7 @@ 
function(catch_discover_tests_impl) "" "" "TEST_EXECUTABLE;TEST_WORKING_DIR;TEST_OUTPUT_DIR;TEST_OUTPUT_PREFIX;TEST_OUTPUT_SUFFIX;TEST_PREFIX;TEST_REPORTER;TEST_SPEC;TEST_SUFFIX;TEST_LIST;CTEST_FILE" - "TEST_EXTRA_ARGS;TEST_PROPERTIES;TEST_EXECUTOR;TEST_DL_PATHS" + "TEST_EXTRA_ARGS;TEST_PROPERTIES;TEST_EXECUTOR;TEST_DL_PATHS;TEST_DL_FRAMEWORK_PATHS" ${ARGN} ) @@ -36,6 +36,7 @@ function(catch_discover_tests_impl) set(output_prefix ${_TEST_OUTPUT_PREFIX}) set(output_suffix ${_TEST_OUTPUT_SUFFIX}) set(dl_paths ${_TEST_DL_PATHS}) + set(dl_framework_paths ${_TEST_DL_FRAMEWORK_PATHS}) set(script) set(suite) set(tests) @@ -60,6 +61,11 @@ function(catch_discover_tests_impl) set(ENV{${dl_paths_variable_name}} "${paths}") endif() + if(APPLE AND dl_framework_paths) + cmake_path(CONVERT "${dl_framework_paths}" TO_NATIVE_PATH_LIST paths) + set(ENV{DYLD_FRAMEWORK_PATH} "${paths}") + endif() + execute_process( COMMAND ${_TEST_EXECUTOR} "${_TEST_EXECUTABLE}" ${spec} --list-tests --verbosity quiet OUTPUT_VARIABLE output @@ -121,6 +127,13 @@ function(catch_discover_tests_impl) endforeach() endif() + if(APPLE AND dl_framework_paths) + foreach(path ${dl_framework_paths}) + cmake_path(NATIVE_PATH path native_path) + list(APPEND environment_modifications "DYLD_FRAMEWORK_PATH=path_list_prepend:${native_path}") + endforeach() + endif() + # Parse output foreach(line ${output}) set(test "${line}") @@ -187,6 +200,7 @@ if(CMAKE_SCRIPT_MODE_FILE) TEST_OUTPUT_PREFIX ${TEST_OUTPUT_PREFIX} TEST_OUTPUT_SUFFIX ${TEST_OUTPUT_SUFFIX} TEST_DL_PATHS ${TEST_DL_PATHS} + TEST_DL_FRAMEWORK_PATHS ${TEST_DL_FRAMEWORK_PATHS} CTEST_FILE ${CTEST_FILE} ) endif() diff --git a/third_party/Catch2/src/catch2/benchmark/catch_benchmark.hpp b/third_party/Catch2/src/catch2/benchmark/catch_benchmark.hpp index 3db40bb0..d0f88cfc 100644 --- a/third_party/Catch2/src/catch2/benchmark/catch_benchmark.hpp +++ b/third_party/Catch2/src/catch2/benchmark/catch_benchmark.hpp @@ -45,12 +45,12 @@ namespace Catch { : 
fun(CATCH_MOVE(func)), name(CATCH_MOVE(benchmarkName)) {} template - ExecutionPlan prepare(const IConfig &cfg, Environment env) const { + ExecutionPlan prepare(const IConfig &cfg, Environment env) { auto min_time = env.clock_resolution.mean * Detail::minimum_ticks; auto run_time = std::max(min_time, std::chrono::duration_cast(cfg.benchmarkWarmupTime())); auto&& test = Detail::run_for_at_least(std::chrono::duration_cast(run_time), 1, fun); int new_iters = static_cast(std::ceil(min_time * test.iterations / test.elapsed)); - return { new_iters, test.elapsed / test.iterations * new_iters * cfg.benchmarkSamples(), fun, std::chrono::duration_cast(cfg.benchmarkWarmupTime()), Detail::warmup_iterations }; + return { new_iters, test.elapsed / test.iterations * new_iters * cfg.benchmarkSamples(), CATCH_MOVE(fun), std::chrono::duration_cast(cfg.benchmarkWarmupTime()), Detail::warmup_iterations }; } template diff --git a/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.cpp b/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.cpp index b437d049..66d4e619 100644 --- a/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.cpp +++ b/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.cpp @@ -11,7 +11,13 @@ namespace Catch { namespace Benchmark { namespace Detail { + struct do_nothing { + void operator()() const {} + }; + BenchmarkFunction::callable::~callable() = default; + BenchmarkFunction::BenchmarkFunction(): + f( new model{ {} } ){} } // namespace Detail } // namespace Benchmark } // namespace Catch diff --git a/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.hpp b/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.hpp index 144e4b6e..a03cb112 100644 --- a/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.hpp +++ b/third_party/Catch2/src/catch2/benchmark/detail/catch_benchmark_function.hpp @@ -35,22 +35,17 @@ namespace Catch { 
private: struct callable { virtual void call(Chronometer meter) const = 0; - virtual Catch::Detail::unique_ptr clone() const = 0; virtual ~callable(); // = default; callable() = default; - callable(callable const&) = default; - callable& operator=(callable const&) = default; + callable(callable&&) = default; + callable& operator=(callable&&) = default; }; template struct model : public callable { model(Fun&& fun_) : fun(CATCH_MOVE(fun_)) {} model(Fun const& fun_) : fun(fun_) {} - Catch::Detail::unique_ptr clone() const override { - return Catch::Detail::make_unique>( *this ); - } - void call(Chronometer meter) const override { call(meter, is_callable()); } @@ -64,14 +59,8 @@ namespace Catch { Fun fun; }; - struct do_nothing { void operator()() const {} }; - - template - BenchmarkFunction(model* c) : f(c) {} - public: - BenchmarkFunction() - : f(new model{ {} }) {} + BenchmarkFunction(); template ::value, int> = 0> @@ -81,20 +70,12 @@ namespace Catch { BenchmarkFunction( BenchmarkFunction&& that ) noexcept: f( CATCH_MOVE( that.f ) ) {} - BenchmarkFunction(BenchmarkFunction const& that) - : f(that.f->clone()) {} - BenchmarkFunction& operator=( BenchmarkFunction&& that ) noexcept { f = CATCH_MOVE( that.f ); return *this; } - BenchmarkFunction& operator=(BenchmarkFunction const& that) { - f = that.f->clone(); - return *this; - } - void operator()(Chronometer meter) const { f->call(meter); } private: diff --git a/third_party/Catch2/src/catch2/benchmark/detail/catch_estimate_clock.hpp b/third_party/Catch2/src/catch2/benchmark/detail/catch_estimate_clock.hpp index 8e355279..6da24ce5 100644 --- a/third_party/Catch2/src/catch2/benchmark/detail/catch_estimate_clock.hpp +++ b/third_party/Catch2/src/catch2/benchmark/detail/catch_estimate_clock.hpp @@ -27,15 +27,17 @@ namespace Catch { namespace Detail { template std::vector resolution(int k) { - std::vector> times; - times.reserve(static_cast(k + 1)); - for ( int i = 0; i < k + 1; ++i ) { - times.push_back( Clock::now() ); + 
const size_t points = static_cast( k + 1 ); + // To avoid overhead from the branch inside vector::push_back, + // we allocate them all and then overwrite. + std::vector> times(points); + for ( auto& time : times ) { + time = Clock::now(); } std::vector deltas; deltas.reserve(static_cast(k)); - for ( size_t idx = 1; idx < times.size(); ++idx ) { + for ( size_t idx = 1; idx < points; ++idx ) { deltas.push_back( static_cast( ( times[idx] - times[idx - 1] ).count() ) ); } diff --git a/third_party/Catch2/src/catch2/benchmark/detail/catch_measure.hpp b/third_party/Catch2/src/catch2/benchmark/detail/catch_measure.hpp index 37494a68..a8049072 100644 --- a/third_party/Catch2/src/catch2/benchmark/detail/catch_measure.hpp +++ b/third_party/Catch2/src/catch2/benchmark/detail/catch_measure.hpp @@ -20,7 +20,7 @@ namespace Catch { template TimingOf measure(Fun&& fun, Args&&... args) { auto start = Clock::now(); - auto&& r = Detail::complete_invoke(fun, CATCH_FORWARD(args)...); + auto&& r = Detail::complete_invoke(CATCH_FORWARD(fun), CATCH_FORWARD(args)...); auto end = Clock::now(); auto delta = end - start; return { delta, CATCH_FORWARD(r), 1 }; diff --git a/third_party/Catch2/src/catch2/catch_message.cpp b/third_party/Catch2/src/catch2/catch_message.cpp index 4b223d96..7b09ab87 100644 --- a/third_party/Catch2/src/catch2/catch_message.cpp +++ b/third_party/Catch2/src/catch2/catch_message.cpp @@ -91,6 +91,7 @@ namespace Catch { m_messages.back().message += " := "; start = pos; } + break; default:; // noop } } diff --git a/third_party/Catch2/src/catch2/catch_test_case_info.hpp b/third_party/Catch2/src/catch2/catch_test_case_info.hpp index da9927e7..00b393b2 100644 --- a/third_party/Catch2/src/catch2/catch_test_case_info.hpp +++ b/third_party/Catch2/src/catch2/catch_test_case_info.hpp @@ -112,6 +112,14 @@ namespace Catch { TestCaseHandle(TestCaseInfo* info, ITestInvoker* invoker) : m_info(info), m_invoker(invoker) {} + void prepareTestCase() const { + m_invoker->prepareTestCase(); + 
} + + void tearDownTestCase() const { + m_invoker->tearDownTestCase(); + } + void invoke() const { m_invoker->invoke(); } diff --git a/third_party/Catch2/src/catch2/catch_test_macros.hpp b/third_party/Catch2/src/catch2/catch_test_macros.hpp index 1088afbe..6ee2129f 100644 --- a/third_party/Catch2/src/catch2/catch_test_macros.hpp +++ b/third_party/Catch2/src/catch2/catch_test_macros.hpp @@ -43,6 +43,7 @@ #define CATCH_TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE( __VA_ARGS__ ) #define CATCH_TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, __VA_ARGS__ ) #define CATCH_METHOD_AS_TEST_CASE( method, ... ) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, __VA_ARGS__ ) + #define CATCH_TEST_CASE_PERSISTENT_FIXTURE( className, ... ) INTERNAL_CATCH_TEST_CASE_PERSISTENT_FIXTURE( className, __VA_ARGS__ ) #define CATCH_REGISTER_TEST_CASE( Function, ... ) INTERNAL_CATCH_REGISTER_TESTCASE( Function, __VA_ARGS__ ) #define CATCH_SECTION( ... ) INTERNAL_CATCH_SECTION( __VA_ARGS__ ) #define CATCH_DYNAMIC_SECTION( ... ) INTERNAL_CATCH_DYNAMIC_SECTION( __VA_ARGS__ ) @@ -97,6 +98,7 @@ #define CATCH_TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ )) #define CATCH_TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ )) #define CATCH_METHOD_AS_TEST_CASE( method, ... ) + #define CATCH_TEST_CASE_PERSISTENT_FIXTURE( className, ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ )) #define CATCH_REGISTER_TEST_CASE( Function, ... ) (void)(0) #define CATCH_SECTION( ... ) #define CATCH_DYNAMIC_SECTION( ... ) @@ -142,6 +144,7 @@ #define TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE( __VA_ARGS__ ) #define TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, __VA_ARGS__ ) #define METHOD_AS_TEST_CASE( method, ... 
) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, __VA_ARGS__ ) + #define TEST_CASE_PERSISTENT_FIXTURE( className, ... ) INTERNAL_CATCH_TEST_CASE_PERSISTENT_FIXTURE( className, __VA_ARGS__ ) #define REGISTER_TEST_CASE( Function, ... ) INTERNAL_CATCH_REGISTER_TESTCASE( Function, __VA_ARGS__ ) #define SECTION( ... ) INTERNAL_CATCH_SECTION( __VA_ARGS__ ) #define DYNAMIC_SECTION( ... ) INTERNAL_CATCH_DYNAMIC_SECTION( __VA_ARGS__ ) @@ -195,6 +198,7 @@ #define TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ ), __VA_ARGS__) #define TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ )) #define METHOD_AS_TEST_CASE( method, ... ) + #define TEST_CASE_PERSISTENT_FIXTURE( className, ... ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION(INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ ), __VA_ARGS__) #define REGISTER_TEST_CASE( Function, ... ) (void)(0) #define SECTION( ... ) #define DYNAMIC_SECTION( ... 
) diff --git a/third_party/Catch2/src/catch2/interfaces/catch_interfaces_test_invoker.hpp b/third_party/Catch2/src/catch2/interfaces/catch_interfaces_test_invoker.hpp index 3caeff9a..124a7f7d 100644 --- a/third_party/Catch2/src/catch2/interfaces/catch_interfaces_test_invoker.hpp +++ b/third_party/Catch2/src/catch2/interfaces/catch_interfaces_test_invoker.hpp @@ -12,6 +12,8 @@ namespace Catch { class ITestInvoker { public: + virtual void prepareTestCase(); + virtual void tearDownTestCase(); virtual void invoke() const = 0; virtual ~ITestInvoker(); // = default }; diff --git a/third_party/Catch2/src/catch2/internal/catch_run_context.cpp b/third_party/Catch2/src/catch2/internal/catch_run_context.cpp index 07691788..8711352c 100644 --- a/third_party/Catch2/src/catch2/internal/catch_run_context.cpp +++ b/third_party/Catch2/src/catch2/internal/catch_run_context.cpp @@ -185,6 +185,7 @@ namespace Catch { auto const& testInfo = testCase.getTestCaseInfo(); m_reporter->testCaseStarting(testInfo); + testCase.prepareTestCase(); m_activeTestCase = &testCase; @@ -254,6 +255,7 @@ namespace Catch { deltaTotals.testCases.failed++; } m_totals.testCases += deltaTotals.testCases; + testCase.tearDownTestCase(); m_reporter->testCaseEnded(TestCaseStats(testInfo, deltaTotals, CATCH_MOVE(redirectedCout), diff --git a/third_party/Catch2/src/catch2/internal/catch_test_registry.cpp b/third_party/Catch2/src/catch2/internal/catch_test_registry.cpp index e9c999fe..07e44617 100644 --- a/third_party/Catch2/src/catch2/internal/catch_test_registry.cpp +++ b/third_party/Catch2/src/catch2/internal/catch_test_registry.cpp @@ -16,6 +16,8 @@ #include namespace Catch { + void ITestInvoker::prepareTestCase() {} + void ITestInvoker::tearDownTestCase() {} ITestInvoker::~ITestInvoker() = default; namespace { diff --git a/third_party/Catch2/src/catch2/internal/catch_test_registry.hpp b/third_party/Catch2/src/catch2/internal/catch_test_registry.hpp index c62fbdcc..e275f2b9 100644 --- 
a/third_party/Catch2/src/catch2/internal/catch_test_registry.hpp +++ b/third_party/Catch2/src/catch2/internal/catch_test_registry.hpp @@ -47,6 +47,33 @@ Detail::unique_ptr makeTestInvoker( void (C::*testAsMethod)() ) { return Detail::make_unique>( testAsMethod ); } +template +class TestInvokerFixture : public ITestInvoker { + void ( C::*m_testAsMethod )() const; + Detail::unique_ptr m_fixture = nullptr; + +public: + TestInvokerFixture( void ( C::*testAsMethod )() const) noexcept : m_testAsMethod( testAsMethod ) {} + + void prepareTestCase() override { + m_fixture = Detail::make_unique(); + } + + void tearDownTestCase() override { + m_fixture.reset(); + } + + void invoke() const override { + auto* f = m_fixture.get(); + ( f->*m_testAsMethod )(); + } +}; + +template +Detail::unique_ptr makeTestInvokerFixture( void ( C::*testAsMethod )() const ) { + return Detail::make_unique>( testAsMethod ); +} + struct NameAndTags { constexpr NameAndTags( StringRef name_ = StringRef(), StringRef tags_ = StringRef() ) noexcept: @@ -143,6 +170,26 @@ static int catchInternalSectionHint = 0; #define INTERNAL_CATCH_TEST_CASE_METHOD( ClassName, ... ) \ INTERNAL_CATCH_TEST_CASE_METHOD2( INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ ), ClassName, __VA_ARGS__ ) + /////////////////////////////////////////////////////////////////////////////// + #define INTERNAL_CATCH_TEST_CASE_PERSISTENT_FIXTURE2( TestName, ClassName, ... 
) \ + CATCH_INTERNAL_START_WARNINGS_SUPPRESSION \ + CATCH_INTERNAL_SUPPRESS_GLOBALS_WARNINGS \ + CATCH_INTERNAL_SUPPRESS_UNUSED_VARIABLE_WARNINGS \ + namespace { \ + struct TestName : INTERNAL_CATCH_REMOVE_PARENS( ClassName ) { \ + void test() const; \ + }; \ + const Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar )( \ + Catch::makeTestInvokerFixture( &TestName::test ), \ + CATCH_INTERNAL_LINEINFO, \ + #ClassName##_catch_sr, \ + Catch::NameAndTags{ __VA_ARGS__ } ); /* NOLINT */ \ + } \ + CATCH_INTERNAL_STOP_WARNINGS_SUPPRESSION \ + void TestName::test() const + #define INTERNAL_CATCH_TEST_CASE_PERSISTENT_FIXTURE( ClassName, ... ) \ + INTERNAL_CATCH_TEST_CASE_PERSISTENT_FIXTURE2( INTERNAL_CATCH_UNIQUE_NAME( CATCH2_INTERNAL_TEST_ ), ClassName, __VA_ARGS__ ) + /////////////////////////////////////////////////////////////////////////////// #define INTERNAL_CATCH_METHOD_AS_TEST_CASE( QualifiedMethod, ... ) \ diff --git a/third_party/Catch2/tests/ExtraTests/X02-DisabledMacros.cpp b/third_party/Catch2/tests/ExtraTests/X02-DisabledMacros.cpp index 68bc2add..231adfb0 100644 --- a/third_party/Catch2/tests/ExtraTests/X02-DisabledMacros.cpp +++ b/third_party/Catch2/tests/ExtraTests/X02-DisabledMacros.cpp @@ -11,34 +11,28 @@ * and expressions in assertion macros are not run. 
*/ - -#include #include +#include #include #include #include struct foo { - foo(){ - REQUIRE_NOTHROW( print() ); - } - void print() const { - std::cout << "This should not happen\n"; - } + foo() { REQUIRE_NOTHROW( print() ); } + void print() const { std::cout << "This should not happen\n"; } }; -#if defined(__clang__) -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wglobal-constructors" +#if defined( __clang__ ) +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wglobal-constructors" #endif // Construct foo, but `foo::print` should not be run static foo f; - -#if defined(__clang__) +#if defined( __clang__ ) // The test is unused since the registration is disabled -#pragma clang diagnostic ignored "-Wunused-function" +# pragma clang diagnostic ignored "-Wunused-function" #endif // This test should not be run, because it won't be registered @@ -60,6 +54,26 @@ TEST_CASE( "Disabled Macros" ) { BENCHMARK( "Disabled benchmark" ) { REQUIRE( 1 == 2 ); }; } -#if defined(__clang__) -#pragma clang diagnostic pop +struct DisabledFixture {}; + +TEST_CASE_PERSISTENT_FIXTURE( DisabledFixture, "Disabled Persistent Fixture" ) { + CHECK( 1 == 2 ); + REQUIRE( 1 == 2 ); + std::cout << "This should not happen\n"; + FAIL(); + + // Test that static assertions don't fire when macros are disabled + STATIC_CHECK( 0 == 1 ); + STATIC_REQUIRE( !true ); + + CAPTURE( 1 ); + CAPTURE( 1, "captured" ); + + REQUIRE_THAT( 1, + Catch::Matchers::Predicate( []( int ) { return false; } ) ); + BENCHMARK( "Disabled benchmark" ) { REQUIRE( 1 == 2 ); }; +} + +#if defined( __clang__ ) +# pragma clang diagnostic pop #endif diff --git a/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.approved.txt index dbdf395c..ce9475b8 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.approved.txt @@ -68,6 +68,8 @@ Nor 
would this :test-result: PASS A TEMPLATE_TEST_CASE_METHOD_SIG based test run that succeeds - 6 :test-result: FAIL A TEST_CASE_METHOD based test run that fails :test-result: PASS A TEST_CASE_METHOD based test run that succeeds +:test-result: FAIL A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +:test-result: PASS A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds :test-result: PASS A Template product test case - Foo :test-result: PASS A Template product test case - Foo :test-result: PASS A Template product test case - std::vector diff --git a/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.multi.approved.txt index 4029ec65..06f2f58d 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/automake.sw.multi.approved.txt @@ -66,6 +66,8 @@ :test-result: PASS A TEMPLATE_TEST_CASE_METHOD_SIG based test run that succeeds - 6 :test-result: FAIL A TEST_CASE_METHOD based test run that fails :test-result: PASS A TEST_CASE_METHOD based test run that succeeds +:test-result: FAIL A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +:test-result: PASS A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds :test-result: PASS A Template product test case - Foo :test-result: PASS A Template product test case - Foo :test-result: PASS A Template product test case - std::vector diff --git a/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.approved.txt index 9363cb54..0f107e71 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.approved.txt @@ -241,6 +241,10 @@ Class.tests.cpp:: passed: Nttp_Fixture::value > 0 for: 3 > 0 Class.tests.cpp:: passed: Nttp_Fixture::value > 0 for: 6 > 0 Class.tests.cpp:: failed: m_a == 2 for: 1 == 2 
Class.tests.cpp:: passed: m_a == 1 for: 1 == 1 +Class.tests.cpp:: passed: m_a++ == 0 for: 0 == 0 +Class.tests.cpp:: failed: m_a == 0 for: 1 == 0 +Class.tests.cpp:: passed: m_a++ == 0 for: 0 == 0 +Class.tests.cpp:: passed: m_a == 1 for: 1 == 1 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 @@ -2840,7 +2844,7 @@ InternalBenchmark.tests.cpp:: passed: med == 18. for: 18.0 == 18.0 InternalBenchmark.tests.cpp:: passed: q3 == 23. for: 23.0 == 23.0 Misc.tests.cpp:: passed: Misc.tests.cpp:: passed: -test cases: 416 | 311 passed | 85 failed | 6 skipped | 14 failed as expected -assertions: 2255 | 2074 passed | 146 failed | 35 failed as expected +test cases: 418 | 312 passed | 86 failed | 6 skipped | 14 failed as expected +assertions: 2259 | 2077 passed | 147 failed | 35 failed as expected diff --git a/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.multi.approved.txt index 4007dfca..4b9d35c6 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/compact.sw.multi.approved.txt @@ -239,6 +239,10 @@ Class.tests.cpp:: passed: Nttp_Fixture::value > 0 for: 3 > 0 Class.tests.cpp:: passed: Nttp_Fixture::value > 0 for: 6 > 0 Class.tests.cpp:: failed: m_a == 2 for: 1 == 2 Class.tests.cpp:: passed: m_a == 1 for: 1 == 1 +Class.tests.cpp:: passed: m_a++ == 0 for: 0 == 0 +Class.tests.cpp:: failed: m_a == 0 for: 1 == 0 +Class.tests.cpp:: passed: m_a++ == 0 for: 0 == 0 +Class.tests.cpp:: passed: m_a == 1 for: 1 == 1 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 Misc.tests.cpp:: passed: x.size() == 0 for: 0 == 0 @@ -2829,7 +2833,7 @@ InternalBenchmark.tests.cpp:: passed: med == 18. for: 18.0 == 18.0 InternalBenchmark.tests.cpp:: passed: q3 == 23. 
for: 23.0 == 23.0 Misc.tests.cpp:: passed: Misc.tests.cpp:: passed: -test cases: 416 | 311 passed | 85 failed | 6 skipped | 14 failed as expected -assertions: 2255 | 2074 passed | 146 failed | 35 failed as expected +test cases: 418 | 312 passed | 86 failed | 6 skipped | 14 failed as expected +assertions: 2259 | 2077 passed | 147 failed | 35 failed as expected diff --git a/third_party/Catch2/tests/SelfTest/Baselines/console.std.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/console.std.approved.txt index 9b398435..f3196855 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/console.std.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/console.std.approved.txt @@ -297,6 +297,18 @@ Class.tests.cpp:: FAILED: with expansion: 1 == 2 +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that fails + Second partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 + ------------------------------------------------------------------------------- A couple of nested sections followed by a failure ------------------------------------------------------------------------------- @@ -1598,6 +1610,6 @@ due to unexpected exception with message: Why would you throw a std::string? 
=============================================================================== -test cases: 416 | 325 passed | 70 failed | 7 skipped | 14 failed as expected -assertions: 2238 | 2074 passed | 129 failed | 35 failed as expected +test cases: 418 | 326 passed | 71 failed | 7 skipped | 14 failed as expected +assertions: 2242 | 2077 passed | 130 failed | 35 failed as expected diff --git a/third_party/Catch2/tests/SelfTest/Baselines/console.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/console.sw.approved.txt index de89152e..ba18ad1f 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/console.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/console.sw.approved.txt @@ -2023,6 +2023,54 @@ A TEST_CASE_METHOD based test run that succeeds Class.tests.cpp: ............................................................................... +Class.tests.cpp:: PASSED: + REQUIRE( m_a == 1 ) +with expansion: + 1 == 1 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that fails + First partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: PASSED: + REQUIRE( m_a++ == 0 ) +with expansion: + 0 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that fails + Second partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... 
+ +Class.tests.cpp:: FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds + First partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: PASSED: + REQUIRE( m_a++ == 0 ) +with expansion: + 0 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds + Second partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + Class.tests.cpp:: PASSED: REQUIRE( m_a == 1 ) with expansion: @@ -18894,6 +18942,6 @@ Misc.tests.cpp: Misc.tests.cpp:: PASSED: =============================================================================== -test cases: 416 | 311 passed | 85 failed | 6 skipped | 14 failed as expected -assertions: 2255 | 2074 passed | 146 failed | 35 failed as expected +test cases: 418 | 312 passed | 86 failed | 6 skipped | 14 failed as expected +assertions: 2259 | 2077 passed | 147 failed | 35 failed as expected diff --git a/third_party/Catch2/tests/SelfTest/Baselines/console.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/console.sw.multi.approved.txt index f70c0d6f..bdb54d43 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/console.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/console.sw.multi.approved.txt @@ -2021,6 +2021,54 @@ A TEST_CASE_METHOD based test run that succeeds Class.tests.cpp: ............................................................................... 
+Class.tests.cpp:: PASSED: + REQUIRE( m_a == 1 ) +with expansion: + 1 == 1 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that fails + First partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: PASSED: + REQUIRE( m_a++ == 0 ) +with expansion: + 0 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that fails + Second partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds + First partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... + +Class.tests.cpp:: PASSED: + REQUIRE( m_a++ == 0 ) +with expansion: + 0 == 0 + +------------------------------------------------------------------------------- +A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds + Second partial run +------------------------------------------------------------------------------- +Class.tests.cpp: +............................................................................... 
+ Class.tests.cpp:: PASSED: REQUIRE( m_a == 1 ) with expansion: @@ -18883,6 +18931,6 @@ Misc.tests.cpp: Misc.tests.cpp:: PASSED: =============================================================================== -test cases: 416 | 311 passed | 85 failed | 6 skipped | 14 failed as expected -assertions: 2255 | 2074 passed | 146 failed | 35 failed as expected +test cases: 418 | 312 passed | 86 failed | 6 skipped | 14 failed as expected +assertions: 2259 | 2077 passed | 147 failed | 35 failed as expected diff --git a/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.approved.txt index 1888139a..cfe9de63 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.approved.txt @@ -1,7 +1,7 @@ - + @@ -313,6 +313,18 @@ at Class.tests.cpp: + + + +FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 +at Class.tests.cpp: + + + + diff --git a/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.multi.approved.txt index b594c8c0..42db614f 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/junit.sw.multi.approved.txt @@ -1,6 +1,6 @@ - + @@ -312,6 +312,18 @@ at Class.tests.cpp: + + + +FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 +at Class.tests.cpp: + + + + diff --git a/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.approved.txt index 537145cc..1b5b8757 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.approved.txt @@ -519,6 +519,18 @@ at Class.tests.cpp: + + + +FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 +at Class.tests.cpp: + + + + diff --git 
a/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.multi.approved.txt index 5965774d..dc6b3b31 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/sonarqube.sw.multi.approved.txt @@ -518,6 +518,18 @@ at Class.tests.cpp: + + + +FAILED: + REQUIRE( m_a == 0 ) +with expansion: + 1 == 0 +at Class.tests.cpp: + + + + diff --git a/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.approved.txt index 383bd4b0..36da9a8e 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.approved.txt @@ -478,6 +478,14 @@ ok {test-number} - Nttp_Fixture::value > 0 for: 6 > 0 not ok {test-number} - m_a == 2 for: 1 == 2 # A TEST_CASE_METHOD based test run that succeeds ok {test-number} - m_a == 1 for: 1 == 1 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +ok {test-number} - m_a++ == 0 for: 0 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +not ok {test-number} - m_a == 0 for: 1 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds +ok {test-number} - m_a++ == 0 for: 0 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds +ok {test-number} - m_a == 1 for: 1 == 1 # A Template product test case - Foo ok {test-number} - x.size() == 0 for: 0 == 0 # A Template product test case - Foo @@ -4539,5 +4547,5 @@ ok {test-number} - q3 == 23. 
for: 23.0 == 23.0 ok {test-number} - # xmlentitycheck ok {test-number} - -1..2267 +1..2271 diff --git a/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.multi.approved.txt index 6622a96a..64828cb1 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/tap.sw.multi.approved.txt @@ -476,6 +476,14 @@ ok {test-number} - Nttp_Fixture::value > 0 for: 6 > 0 not ok {test-number} - m_a == 2 for: 1 == 2 # A TEST_CASE_METHOD based test run that succeeds ok {test-number} - m_a == 1 for: 1 == 1 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +ok {test-number} - m_a++ == 0 for: 0 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that fails +not ok {test-number} - m_a == 0 for: 1 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds +ok {test-number} - m_a++ == 0 for: 0 == 0 +# A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds +ok {test-number} - m_a == 1 for: 1 == 1 # A Template product test case - Foo ok {test-number} - x.size() == 0 for: 0 == 0 # A Template product test case - Foo @@ -4528,5 +4536,5 @@ ok {test-number} - q3 == 23. 
for: 23.0 == 23.0 ok {test-number} - # xmlentitycheck ok {test-number} - -1..2267 +1..2271 diff --git a/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.approved.txt index cec3a0ee..d305ee83 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.approved.txt @@ -166,6 +166,11 @@ ##teamcity[testFinished name='A TEST_CASE_METHOD based test run that fails' duration="{duration}"] ##teamcity[testStarted name='A TEST_CASE_METHOD based test run that succeeds'] ##teamcity[testFinished name='A TEST_CASE_METHOD based test run that succeeds' duration="{duration}"] +##teamcity[testStarted name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails'] +##teamcity[testFailed name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails' message='-------------------------------------------------------------------------------|nSecond partial run|n-------------------------------------------------------------------------------|nClass.tests.cpp:|n...............................................................................|n|nClass.tests.cpp:|nexpression failed|n REQUIRE( m_a == 0 )|nwith expansion:|n 1 == 0|n'] +##teamcity[testFinished name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails' duration="{duration}"] +##teamcity[testStarted name='A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds'] +##teamcity[testFinished name='A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds' duration="{duration}"] ##teamcity[testStarted name='A Template product test case - Foo'] ##teamcity[testFinished name='A Template product test case - Foo' duration="{duration}"] ##teamcity[testStarted name='A Template product test case - Foo'] diff --git a/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.multi.approved.txt index 
6f7d8f19..156a8e2c 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/teamcity.sw.multi.approved.txt @@ -166,6 +166,11 @@ ##teamcity[testFinished name='A TEST_CASE_METHOD based test run that fails' duration="{duration}"] ##teamcity[testStarted name='A TEST_CASE_METHOD based test run that succeeds'] ##teamcity[testFinished name='A TEST_CASE_METHOD based test run that succeeds' duration="{duration}"] +##teamcity[testStarted name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails'] +##teamcity[testFailed name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails' message='-------------------------------------------------------------------------------|nSecond partial run|n-------------------------------------------------------------------------------|nClass.tests.cpp:|n...............................................................................|n|nClass.tests.cpp:|nexpression failed|n REQUIRE( m_a == 0 )|nwith expansion:|n 1 == 0|n'] +##teamcity[testFinished name='A TEST_CASE_PERSISTENT_FIXTURE based test run that fails' duration="{duration}"] +##teamcity[testStarted name='A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds'] +##teamcity[testFinished name='A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds' duration="{duration}"] ##teamcity[testStarted name='A Template product test case - Foo'] ##teamcity[testFinished name='A Template product test case - Foo' duration="{duration}"] ##teamcity[testStarted name='A Template product test case - Foo'] diff --git a/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.approved.txt index f8acf1d4..a00e4577 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.approved.txt @@ -2056,6 +2056,56 @@ Nor would this + +
+ + + m_a++ == 0 + + + 0 == 0 + + + +
+
+ + + m_a == 0 + + + 1 == 0 + + + +
+ +
+ +
+ + + m_a++ == 0 + + + 0 == 0 + + + +
+
+ + + m_a == 1 + + + 1 == 1 + + + +
+ +
@@ -21832,6 +21882,6 @@ Approx( -1.95996398454005449 ) - - + + diff --git a/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.multi.approved.txt b/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.multi.approved.txt index 5d815a51..08a1804a 100644 --- a/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.multi.approved.txt +++ b/third_party/Catch2/tests/SelfTest/Baselines/xml.sw.multi.approved.txt @@ -2056,6 +2056,56 @@ Nor would this
+ +
+ + + m_a++ == 0 + + + 0 == 0 + + + +
+
+ + + m_a == 0 + + + 1 == 0 + + + +
+ +
+ +
+ + + m_a++ == 0 + + + 0 == 0 + + + +
+
+ + + m_a == 1 + + + 1 == 1 + + + +
+ +
@@ -21831,6 +21881,6 @@ Approx( -1.95996398454005449 ) - - + + diff --git a/third_party/Catch2/tests/SelfTest/UsageTests/Class.tests.cpp b/third_party/Catch2/tests/SelfTest/UsageTests/Class.tests.cpp index 682171da..75510f10 100644 --- a/third_party/Catch2/tests/SelfTest/UsageTests/Class.tests.cpp +++ b/third_party/Catch2/tests/SelfTest/UsageTests/Class.tests.cpp @@ -32,6 +32,10 @@ namespace { int m_a; }; + struct Persistent_Fixture { + mutable int m_a = 0; + }; + template struct Template_Fixture { Template_Fixture(): m_a( 1 ) {} @@ -64,6 +68,17 @@ TEST_CASE_METHOD( Fixture, "A TEST_CASE_METHOD based test run that succeeds", "[ REQUIRE( m_a == 1 ); } +TEST_CASE_PERSISTENT_FIXTURE( Persistent_Fixture, "A TEST_CASE_PERSISTENT_FIXTURE based test run that succeeds", "[class]" ) +{ + SECTION( "First partial run" ) { + REQUIRE( m_a++ == 0 ); + } + + SECTION( "Second partial run" ) { + REQUIRE( m_a == 1 ); + } +} + TEMPLATE_TEST_CASE_METHOD(Template_Fixture, "A TEMPLATE_TEST_CASE_METHOD based test run that succeeds", "[class][template]", int, float, double) { REQUIRE( Template_Fixture::m_a == 1 ); } @@ -96,6 +111,17 @@ namespace Inner REQUIRE( m_a == 2 ); } + TEST_CASE_PERSISTENT_FIXTURE( Persistent_Fixture, "A TEST_CASE_PERSISTENT_FIXTURE based test run that fails", "[.][class][failing]" ) + { + SECTION( "First partial run" ) { + REQUIRE( m_a++ == 0 ); + } + + SECTION( "Second partial run" ) { + REQUIRE( m_a == 0 ); + } + } + TEMPLATE_TEST_CASE_METHOD(Template_Fixture,"A TEMPLATE_TEST_CASE_METHOD based test run that fails", "[.][class][template][failing]", int, float, double) { REQUIRE( Template_Fixture::m_a == 2 ); diff --git a/third_party/Imath/.github/workflows/ci_workflow.yml b/third_party/Imath/.github/workflows/ci_workflow.yml index 6bb2c7f5..3cfd3783 100644 --- a/third_party/Imath/.github/workflows/ci_workflow.yml +++ b/third_party/Imath/.github/workflows/ci_workflow.yml @@ -63,9 +63,9 @@ jobs: build: [1, 2, 3, 4, 5, 6, 7, 8, 9] include: # 
------------------------------------------------------------------- - # GCC, VFX CY2023 + # GCC, VFX CY2024 # ------------------------------------------------------------------- - # C++17, Python 3.10.9 + # C++17, Python 3.11 - build: 1 build-type: Release build-shared: 'ON' @@ -74,10 +74,10 @@ jobs: cc-compiler: gcc compiler-desc: gcc11.2.1 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 - # C++17, Python 3.10.9, Debug + # C++17, Python 3.11, Debug - build: 2 build-type: Debug build-shared: 'ON' @@ -86,10 +86,10 @@ jobs: cc-compiler: gcc compiler-desc: gcc11.2.1 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 - # C++17, Python 3.10.9, Static + # C++17, Python 3.11, Static - build: 3 build-type: Release build-shared: 'OFF' @@ -98,10 +98,10 @@ jobs: cc-compiler: gcc compiler-desc: gcc11.2.1 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 - # C++14, Python 3.10.9 + # C++14, Python 3.11 - build: 4 build-type: Release build-shared: 'ON' @@ -110,13 +110,13 @@ jobs: cc-compiler: gcc compiler-desc: gcc11.2.1 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 # ------------------------------------------------------------------- - # Clang 15.0, VFX CY2023 + # Clang 15.0, VFX CY2024 # ------------------------------------------------------------------- - # C++17, Python 3.10.9 + # C++17, Python 3.11 - build: 5 build-type: Release build-shared: 'ON' @@ -125,13 +125,13 @@ jobs: cc-compiler: clang compiler-desc: clang15.0 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 # ------------------------------------------------------------------- - # Clang 14.0, VFX CY2023 + # Clang 14.0, VFX CY2024 # ------------------------------------------------------------------- - # C++17, Python 3.10.9 + # C++17, Python 3.11 - build: 6 build-type: Release 
build-shared: 'ON' @@ -140,28 +140,28 @@ jobs: cc-compiler: clang compiler-desc: clang14.0 python: 'ON' - python-desc: python3.10.9 - vfx-cy: 2023 + python-desc: python3.11 + vfx-cy: 2024 # ------------------------------------------------------------------- - # GCC, VFX CY2022 + # GCC, VFX CY2023 # ------------------------------------------------------------------- - # C++17, Python 3.9 + # C++17, Python 3.10.9 - build: 7 build-type: Release build-shared: 'ON' cxx-standard: 17 cxx-compiler: g++ cc-compiler: gcc - compiler-desc: gcc9.3.1 + compiler-desc: gcc11.2.1 python: 'ON' - python-desc: python3.9.7 - vfx-cy: 2022 + python-desc: python3.10.9 + vfx-cy: 2023 # ------------------------------------------------------------------- - # GCC, VFX CY2021 + # GCC, VFX CY2022 # ------------------------------------------------------------------- - # C++17, Python 3.7.9 + # C++17, Python 3.9 - build: 8 build-type: Release build-shared: 'ON' @@ -170,27 +170,29 @@ jobs: cc-compiler: gcc compiler-desc: gcc9.3.1 python: 'ON' - python-desc: python3.7.9 - vfx-cy: 2021 + python-desc: python3.9.7 + vfx-cy: 2022 # ------------------------------------------------------------------- - # GCC, VFX CY2020 + # GCC, VFX CY2021 # ------------------------------------------------------------------- - # C++14, Python 3.7 + # C++17, Python 3.7.9 - build: 9 build-type: Release build-shared: 'ON' - cxx-standard: 14 + cxx-standard: 17 cxx-compiler: g++ cc-compiler: gcc - compiler-desc: gcc6.3.1 + compiler-desc: gcc9.3.1 python: 'ON' - python-desc: python3.7.3 - vfx-cy: 2020 + python-desc: python3.7.9 + vfx-cy: 2021 env: CXX: ${{ matrix.cxx-compiler }} CC: ${{ matrix.cc-compiler }} + ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION: node16 + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true steps: - name: Checkout uses: actions/checkout@v2 @@ -219,9 +221,12 @@ jobs: working-directory: _build - name: Examples run: | - # Confirm the python module loads. 
Query the site-packages directory and substitute ../_install - export PYTHONPATH=`python -c "import site; print('../_install%s' % site.USER_SITE[len(site.USER_BASE):])"` + # Confirm the python module loads. Set PYTHONPATH to the + # _install directory of the module (better to find it + # procedurally than hard code a path). + export PYTHONPATH=`find ../_install -name imath.so | xargs dirname` python -c "import imath;print(imath.__version__)" + # Make sure we can build the tests when configured as a # standalone application linking against the just-installed # Imath library. @@ -232,8 +237,10 @@ jobs: -DCMAKE_CXX_FLAGS=${{ matrix.cxx-flags }} cmake --build . \ --config ${{ matrix.build-type }} + # Confirm the tests run ./bin/ImathTest + # Confirm the examples compile and execute rm -rf bin CMakeCache.txt CMakeFiles cmake_install.cmake Makefile cmake ../website/examples \ @@ -267,18 +274,18 @@ jobs: runs-on: macos-${{ matrix.osver }} strategy: matrix: - build: [1, 2, 3, 4, 5, 6] + build: [1, 2, 3, 4, 5, 6, 7] include: # -------------------------------------------------------------------- - # VFX CY2023 - MacOS 11.0 + # MacOS 14 # -------------------------------------------------------------------- - # C++11 + # Release - build: 1 build-type: Release build-shared: 'ON' cxx-standard: 17 exclude-tests: - osver: 11.0 + osver: 14 # Debug - build: 2 @@ -286,8 +293,7 @@ jobs: build-shared: 'ON' cxx-standard: 17 exclude-tests: - osver: 11.0 - + osver: 14 # Static - build: 3 @@ -295,8 +301,7 @@ jobs: build-shared: 'OFF' cxx-standard: 17 exclude-tests: - osver: 11.0 - + osver: 14 # C++14 - build: 4 @@ -304,8 +309,7 @@ jobs: build-shared: 'ON' cxx-standard: 14 exclude-tests: - osver: 11.0 - + osver: 14 # C++11 - build: 5 @@ -313,18 +317,27 @@ jobs: build-shared: 'ON' cxx-standard: 11 exclude-tests: - osver: 11.0 + osver: 14 # -------------------------------------------------------------------- - # VFX CY2022 - MacOS 12 + # MacOS 13 # 
-------------------------------------------------------------------- - # C++11 - build: 6 build-type: Release build-shared: 'ON' cxx-standard: 17 exclude-tests: - osver: 12.0 + osver: 13 + + # -------------------------------------------------------------------- + # MacOS 12 + # -------------------------------------------------------------------- + - build: 7 + build-type: Release + build-shared: 'ON' + cxx-standard: 17 + exclude-tests: + osver: 12 steps: ## - name: Setup Python diff --git a/third_party/Imath/.github/workflows/codeql.yml b/third_party/Imath/.github/workflows/codeql.yml new file mode 100644 index 00000000..39893fb0 --- /dev/null +++ b/third_party/Imath/.github/workflows/codeql.yml @@ -0,0 +1,91 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "main", "**/**" ] + pull_request: + branches: [ "main", "**/**" ] + schedule: + - cron: '37 10 * * 0' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. 
+ runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} + permissions: + # required for all workflows + security-events: write + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: c-cpp + build-mode: autobuild + - language: python + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. + # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/third_party/Imath/.github/workflows/release-sign.yml b/third_party/Imath/.github/workflows/release-sign.yml index bf6ab58b..38663e81 100644 --- a/third_party/Imath/.github/workflows/release-sign.yml +++ b/third_party/Imath/.github/workflows/release-sign.yml @@ -54,7 +54,7 @@ jobs: run: git archive --format=tar.gz -o ${IMATH_TARBALL} --prefix ${IMATH_PREFIX} ${TAG} - name: Sign archive with Sigstore - uses: sigstore/gh-action-sigstore-python@v2.1.1 + uses: sigstore/gh-action-sigstore-python@v3.0.0 with: inputs: ${{ env.IMATH_TARBALL }} diff --git a/third_party/Imath/.github/workflows/scorecard.yml b/third_party/Imath/.github/workflows/scorecard.yml index 6aeab45d..e6892231 100644 --- a/third_party/Imath/.github/workflows/scorecard.yml +++ b/third_party/Imath/.github/workflows/scorecard.yml @@ -30,7 +30,7 @@ jobs: persist-credentials: false - name: "Run 
analysis" - uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif results_format: sarif @@ -43,7 +43,7 @@ jobs: # Upload the results as artifacts (optional) - name: "Upload artifact" - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: name: SARIF file path: results.sarif @@ -51,6 +51,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 + uses: github/codeql-action/upload-sarif@4fa2a7953630fd2f3fb380f21be14ede0169dd4f # v3.25.12 with: sarif_file: results.sarif diff --git a/third_party/Imath/website/requirements.txt b/third_party/Imath/website/requirements.txt index 15e95b51..e4ca40a7 100644 --- a/third_party/Imath/website/requirements.txt +++ b/third_party/Imath/website/requirements.txt @@ -1,6 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) Contributors to the OpenEXR Project. 
-sphinx == 7.2.6 +sphinx == 7.4.2 breathe sphinx-press-theme diff --git a/third_party/abseil-cpp/CMake/AbseilDll.cmake b/third_party/abseil-cpp/CMake/AbseilDll.cmake index a81cbaf3..a3fb15a1 100644 --- a/third_party/abseil-cpp/CMake/AbseilDll.cmake +++ b/third_party/abseil-cpp/CMake/AbseilDll.cmake @@ -49,6 +49,8 @@ set(ABSL_INTERNAL_DLL_FILES "base/internal/thread_identity.h" "base/internal/throw_delegate.cc" "base/internal/throw_delegate.h" + "base/internal/tracing.cc" + "base/internal/tracing.h" "base/internal/tsan_mutex_interface.h" "base/internal/unaligned_access.h" "base/internal/unscaledcycleclock.cc" @@ -321,7 +323,6 @@ set(ABSL_INTERNAL_DLL_FILES "strings/internal/string_constant.h" "strings/internal/stringify_sink.h" "strings/internal/stringify_sink.cc" - "strings/internal/has_absl_stringify.h" "strings/has_absl_stringify.h" "strings/has_ostream_operator.h" "strings/match.cc" @@ -487,6 +488,7 @@ endif() set(ABSL_INTERNAL_DLL_TARGETS "absl_check" "absl_log" + "absl_vlog_is_on" "algorithm" "algorithm_container" "any" @@ -644,6 +646,7 @@ set(ABSL_INTERNAL_DLL_TARGETS "utility" "variant" "vlog_config_internal" + "vlog_is_on" ) if(NOT MSVC) diff --git a/third_party/abseil-cpp/CMake/AbseilHelpers.cmake b/third_party/abseil-cpp/CMake/AbseilHelpers.cmake index 70a37f11..d8fb9fe5 100644 --- a/third_party/abseil-cpp/CMake/AbseilHelpers.cmake +++ b/third_party/abseil-cpp/CMake/AbseilHelpers.cmake @@ -186,16 +186,16 @@ function(absl_cc_library) endif() endif() endforeach() - set(skip_next_cflag OFF) foreach(cflag ${ABSL_CC_LIB_COPTS}) - if(skip_next_cflag) - set(skip_next_cflag OFF) - elseif(${cflag} MATCHES "^-Xarch_") + # Strip out the CMake-specific `SHELL:` prefix, which is used to construct + # a group of space-separated options. 
+ # https://cmake.org/cmake/help/v3.30/command/target_compile_options.html#option-de-duplication + string(REGEX REPLACE "^SHELL:" "" cflag "${cflag}") + if(${cflag} MATCHES "^-Xarch_") # An -Xarch_ flag implies that its successor only applies to the - # specified platform. Filter both of them out before the successor - # reaches the "^-m" filter. - set(skip_next_cflag ON) - elseif(${cflag} MATCHES "^(-Wno|/wd)") + # specified platform. Such option groups are each specified in a single + # `SHELL:`-prefixed string in the COPTS list, which we simply ignore. + elseif(${cflag} MATCHES "^(-Wno-|/wd)") # These flags are needed to suppress warnings that might fire in our headers. set(PC_CFLAGS "${PC_CFLAGS} ${cflag}") elseif(${cflag} MATCHES "^(-W|/w[1234eo])") @@ -258,6 +258,13 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n") elseif(_build_type STREQUAL "static" OR _build_type STREQUAL "shared") add_library(${_NAME} "") target_sources(${_NAME} PRIVATE ${ABSL_CC_LIB_SRCS} ${ABSL_CC_LIB_HDRS}) + if(APPLE) + set_target_properties(${_NAME} PROPERTIES + INSTALL_RPATH "@loader_path") + elseif(UNIX) + set_target_properties(${_NAME} PROPERTIES + INSTALL_RPATH "$ORIGIN") + endif() target_link_libraries(${_NAME} PUBLIC ${ABSL_CC_LIB_DEPS} PRIVATE diff --git a/third_party/abseil-cpp/CMake/install_test_project/test.sh b/third_party/abseil-cpp/CMake/install_test_project/test.sh index cc028bac..962bc8d2 100755 --- a/third_party/abseil-cpp/CMake/install_test_project/test.sh +++ b/third_party/abseil-cpp/CMake/install_test_project/test.sh @@ -22,7 +22,8 @@ set -euox pipefail absl_dir=/abseil-cpp absl_build_dir=/buildfs googletest_builddir=/googletest_builddir -project_dir="${absl_dir}"/CMake/install_test_project +googletest_archive="googletest-${ABSL_GOOGLETEST_VERSION}.tar.gz" +project_dir="${absl_dir}/CMake/install_test_project" project_build_dir=/buildfs/project-build build_shared_libs="OFF" @@ -33,9 +34,9 @@ fi # Build and install GoogleTest mkdir "${googletest_builddir}" pushd 
"${googletest_builddir}" -curl -L "${ABSL_GOOGLETEST_DOWNLOAD_URL}" --output "${ABSL_GOOGLETEST_COMMIT}".zip -unzip "${ABSL_GOOGLETEST_COMMIT}".zip -pushd "googletest-${ABSL_GOOGLETEST_COMMIT}" +curl -L "${ABSL_GOOGLETEST_DOWNLOAD_URL}" --output "${googletest_archive}" +tar -xz -f "${googletest_archive}" +pushd "googletest-${ABSL_GOOGLETEST_VERSION}" mkdir build pushd build cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS="${build_shared_libs}" .. diff --git a/third_party/abseil-cpp/MODULE.bazel b/third_party/abseil-cpp/MODULE.bazel index fc1534a7..d9eb0f17 100644 --- a/third_party/abseil-cpp/MODULE.bazel +++ b/third_party/abseil-cpp/MODULE.bazel @@ -35,8 +35,8 @@ bazel_dep(name = "google_benchmark", dev_dependency = True) bazel_dep(name = "googletest", - version = "1.14.0.bcr.1", + version = "1.15.2", repo_name = "com_google_googletest") bazel_dep(name = "platforms", - version = "0.0.8") + version = "0.0.10") diff --git a/third_party/abseil-cpp/WORKSPACE b/third_party/abseil-cpp/WORKSPACE index 0d886091..dee6d05e 100644 --- a/third_party/abseil-cpp/WORKSPACE +++ b/third_party/abseil-cpp/WORKSPACE @@ -20,20 +20,21 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") # GoogleTest/GoogleMock framework. Used by most unit-tests. http_archive( - name = "com_google_googletest", - sha256 = "8ad598c73ad796e0d8280b082cebd82a630d73e73cd3c70057938a6501bba5d7", - strip_prefix = "googletest-1.14.0", - # Keep this URL in sync with ABSL_GOOGLETEST_COMMIT in ci/cmake_common.sh and - # ci/windows_msvc_cmake.bat. - urls = ["https://github.com/google/googletest/archive/refs/tags/v1.14.0.tar.gz"], + name = "com_google_googletest", + sha256 = "7b42b4d6ed48810c5362c265a17faebe90dc2373c885e5216439d37927f02926", + strip_prefix = "googletest-1.15.2", + # Keep this URL in sync with the version in ci/cmake_common.sh and + # ci/windows_msvc_cmake.bat. 
+ urls = ["https://github.com/google/googletest/releases/download/v1.15.2/googletest-1.15.2.tar.gz"], ) # RE2 (the regular expression library used by GoogleTest) http_archive( name = "com_googlesource_code_re2", - sha256 = "828341ad08524618a626167bd320b0c2acc97bd1c28eff693a9ea33a7ed2a85f", - strip_prefix = "re2-2023-11-01", - urls = ["https://github.com/google/re2/releases/download/2023-11-01/re2-2023-11-01.zip"], + sha256 = "eb2df807c781601c14a260a507a5bb4509be1ee626024cb45acbd57cb9d4032b", + strip_prefix = "re2-2024-07-02", + urls = ["https://github.com/google/re2/releases/download/2024-07-02/re2-2024-07-02.tar.gz"], + repo_mapping = {"@abseil-cpp": "@com_google_absl"}, ) # Google benchmark. @@ -46,14 +47,17 @@ http_archive( # Bazel Skylib. http_archive( - name = "bazel_skylib", - sha256 = "cd55a062e763b9349921f0f5db8c3933288dc8ba4f76dd9416aac68acee3cb94", - urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz"], + name = "bazel_skylib", + sha256 = "cd55a062e763b9349921f0f5db8c3933288dc8ba4f76dd9416aac68acee3cb94", + urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz"], ) # Bazel platform rules. 
http_archive( name = "platforms", - sha256 = "8150406605389ececb6da07cbcb509d5637a3ab9a24bc69b1101531367d89d74", - urls = ["https://github.com/bazelbuild/platforms/releases/download/0.0.8/platforms-0.0.8.tar.gz"], + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/0.0.10/platforms-0.0.10.tar.gz", + "https://github.com/bazelbuild/platforms/releases/download/0.0.10/platforms-0.0.10.tar.gz", + ], + sha256 = "218efe8ee736d26a3572663b374a253c012b716d8af0c07e842e82f238a0a7ee", ) diff --git a/third_party/abseil-cpp/absl/algorithm/algorithm.h b/third_party/abseil-cpp/absl/algorithm/algorithm.h index 59aeed7d..48f59504 100644 --- a/third_party/abseil-cpp/absl/algorithm/algorithm.h +++ b/third_party/abseil-cpp/absl/algorithm/algorithm.h @@ -53,8 +53,8 @@ using std::rotate; // n = (`last` - `first`) comparisons. A linear search over short containers // may be faster than a binary search, even when the container is sorted. template -bool linear_search(InputIterator first, InputIterator last, - const EqualityComparable& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool linear_search( + InputIterator first, InputIterator last, const EqualityComparable& value) { return std::find(first, last, value) != last; } diff --git a/third_party/abseil-cpp/absl/algorithm/algorithm_test.cc b/third_party/abseil-cpp/absl/algorithm/algorithm_test.cc index e6ee4695..1c1a3079 100644 --- a/third_party/abseil-cpp/absl/algorithm/algorithm_test.cc +++ b/third_party/abseil-cpp/absl/algorithm/algorithm_test.cc @@ -14,11 +14,9 @@ #include "absl/algorithm/algorithm.h" -#include -#include +#include #include -#include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/base/config.h" @@ -47,4 +45,16 @@ TEST_F(LinearSearchTest, linear_searchConst) { absl::linear_search(const_container->begin(), const_container->end(), 4)); } +#if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \ + ABSL_INTERNAL_CPLUSPLUS_LANG >= 202002L + +TEST_F(LinearSearchTest, Constexpr) { + static 
constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::linear_search(kArray.begin(), kArray.end(), 3)); + static_assert(!absl::linear_search(kArray.begin(), kArray.end(), 4)); +} + +#endif // defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && + // ABSL_INTERNAL_CPLUSPLUS_LANG >= 202002L + } // namespace diff --git a/third_party/abseil-cpp/absl/algorithm/container.h b/third_party/abseil-cpp/absl/algorithm/container.h index 6bbe3b5a..2c0426da 100644 --- a/third_party/abseil-cpp/absl/algorithm/container.h +++ b/third_party/abseil-cpp/absl/algorithm/container.h @@ -132,7 +132,8 @@ struct IsUnorderedContainer> // Container-based version of absl::linear_search() for performing a linear // search within a container. template -bool c_linear_search(const C& c, EqualityComparable&& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_linear_search( + const C& c, EqualityComparable&& value) { return linear_search(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(value)); @@ -163,7 +164,7 @@ ABSL_INTERNAL_CONSTEXPR_SINCE_CXX17 // Container-based version of the `std::all_of()` function to // test if all elements within a container satisfy a condition. template -bool c_all_of(const C& c, Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_all_of(const C& c, Pred&& pred) { return std::all_of(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -174,7 +175,7 @@ bool c_all_of(const C& c, Pred&& pred) { // Container-based version of the `std::any_of()` function to // test if any element in a container fulfills a condition. 
template -bool c_any_of(const C& c, Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_any_of(const C& c, Pred&& pred) { return std::any_of(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -185,7 +186,7 @@ bool c_any_of(const C& c, Pred&& pred) { // Container-based version of the `std::none_of()` function to // test if no elements in a container fulfill a condition. template -bool c_none_of(const C& c, Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_none_of(const C& c, Pred&& pred) { return std::none_of(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -196,7 +197,8 @@ bool c_none_of(const C& c, Pred&& pred) { // Container-based version of the `std::for_each()` function to // apply a function to a container's elements. template -decay_t c_for_each(C&& c, Function&& f) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 decay_t c_for_each(C&& c, + Function&& f) { return std::for_each(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(f)); @@ -207,7 +209,9 @@ decay_t c_for_each(C&& c, Function&& f) { // Container-based version of the `std::find()` function to find // the first element containing the passed value within a container value. template -container_algorithm_internal::ContainerIter c_find(C& c, T&& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find(C& c, T&& value) { return std::find(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(value)); @@ -218,7 +222,8 @@ container_algorithm_internal::ContainerIter c_find(C& c, T&& value) { // Container-based version of the `std::ranges::contains()` C++23 // function to search a container for a value. 
template -bool c_contains(const Sequence& sequence, T&& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_contains(const Sequence& sequence, + T&& value) { return absl::c_find(sequence, std::forward(value)) != container_algorithm_internal::c_end(sequence); } @@ -228,7 +233,9 @@ bool c_contains(const Sequence& sequence, T&& value) { // Container-based version of the `std::find_if()` function to find // the first element in a container matching the given condition. template -container_algorithm_internal::ContainerIter c_find_if(C& c, Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_if(C& c, Pred&& pred) { return std::find_if(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -239,8 +246,9 @@ container_algorithm_internal::ContainerIter c_find_if(C& c, Pred&& pred) { // Container-based version of the `std::find_if_not()` function to // find the first element in a container not matching the given condition. template -container_algorithm_internal::ContainerIter c_find_if_not(C& c, - Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_if_not(C& c, Pred&& pred) { return std::find_if_not(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -251,8 +259,9 @@ container_algorithm_internal::ContainerIter c_find_if_not(C& c, // Container-based version of the `std::find_end()` function to // find the last subsequence within a container. 
template -container_algorithm_internal::ContainerIter c_find_end( - Sequence1& sequence, Sequence2& subsequence) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_end(Sequence1& sequence, Sequence2& subsequence) { return std::find_end(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), container_algorithm_internal::c_begin(subsequence), @@ -262,8 +271,10 @@ container_algorithm_internal::ContainerIter c_find_end( // Overload of c_find_end() for using a predicate evaluation other than `==` as // the function's test condition. template -container_algorithm_internal::ContainerIter c_find_end( - Sequence1& sequence, Sequence2& subsequence, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_end(Sequence1& sequence, Sequence2& subsequence, + BinaryPredicate&& pred) { return std::find_end(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), container_algorithm_internal::c_begin(subsequence), @@ -277,8 +288,9 @@ container_algorithm_internal::ContainerIter c_find_end( // find the first element within the container that is also within the options // container. template -container_algorithm_internal::ContainerIter c_find_first_of(C1& container, - C2& options) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_first_of(C1& container, C2& options) { return std::find_first_of(container_algorithm_internal::c_begin(container), container_algorithm_internal::c_end(container), container_algorithm_internal::c_begin(options), @@ -288,8 +300,9 @@ container_algorithm_internal::ContainerIter c_find_first_of(C1& container, // Overload of c_find_first_of() for using a predicate evaluation other than // `==` as the function's test condition. 
template -container_algorithm_internal::ContainerIter c_find_first_of( - C1& container, C2& options, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_find_first_of(C1& container, C2& options, BinaryPredicate&& pred) { return std::find_first_of(container_algorithm_internal::c_begin(container), container_algorithm_internal::c_end(container), container_algorithm_internal::c_begin(options), @@ -302,8 +315,9 @@ container_algorithm_internal::ContainerIter c_find_first_of( // Container-based version of the `std::adjacent_find()` function to // find equal adjacent elements within a container. template -container_algorithm_internal::ContainerIter c_adjacent_find( - Sequence& sequence) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_adjacent_find(Sequence& sequence) { return std::adjacent_find(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence)); } @@ -311,8 +325,9 @@ container_algorithm_internal::ContainerIter c_adjacent_find( // Overload of c_adjacent_find() for using a predicate evaluation other than // `==` as the function's test condition. template -container_algorithm_internal::ContainerIter c_adjacent_find( - Sequence& sequence, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_adjacent_find(Sequence& sequence, BinaryPredicate&& pred) { return std::adjacent_find(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), std::forward(pred)); @@ -323,8 +338,9 @@ container_algorithm_internal::ContainerIter c_adjacent_find( // Container-based version of the `std::count()` function to count // values that match within a container. 
template -container_algorithm_internal::ContainerDifferenceType c_count( - const C& c, T&& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerDifferenceType + c_count(const C& c, T&& value) { return std::count(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(value)); @@ -335,8 +351,9 @@ container_algorithm_internal::ContainerDifferenceType c_count( // Container-based version of the `std::count_if()` function to // count values matching a condition within a container. template -container_algorithm_internal::ContainerDifferenceType c_count_if( - const C& c, Pred&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerDifferenceType + c_count_if(const C& c, Pred&& pred) { return std::count_if(container_algorithm_internal::c_begin(c), container_algorithm_internal::c_end(c), std::forward(pred)); @@ -348,8 +365,9 @@ container_algorithm_internal::ContainerDifferenceType c_count_if( // return the first element where two ordered containers differ. Applies `==` to // the first N elements of `c1` and `c2`, where N = min(size(c1), size(c2)). template -container_algorithm_internal::ContainerIterPairType c_mismatch(C1& c1, - C2& c2) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIterPairType + c_mismatch(C1& c1, C2& c2) { return std::mismatch(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -360,8 +378,9 @@ container_algorithm_internal::ContainerIterPairType c_mismatch(C1& c1, // the function's test condition. Applies `pred`to the first N elements of `c1` // and `c2`, where N = min(size(c1), size(c2)). 
template -container_algorithm_internal::ContainerIterPairType c_mismatch( - C1& c1, C2& c2, BinaryPredicate pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIterPairType + c_mismatch(C1& c1, C2& c2, BinaryPredicate pred) { return std::mismatch(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -373,7 +392,7 @@ container_algorithm_internal::ContainerIterPairType c_mismatch( // Container-based version of the `std::equal()` function to // test whether two containers are equal. template -bool c_equal(const C1& c1, const C2& c2) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_equal(const C1& c1, const C2& c2) { return std::equal(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -383,7 +402,8 @@ bool c_equal(const C1& c1, const C2& c2) { // Overload of c_equal() for using a predicate evaluation other than `==` as // the function's test condition. template -bool c_equal(const C1& c1, const C2& c2, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_equal(const C1& c1, const C2& c2, + BinaryPredicate&& pred) { return std::equal(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -396,7 +416,8 @@ bool c_equal(const C1& c1, const C2& c2, BinaryPredicate&& pred) { // Container-based version of the `std::is_permutation()` function // to test whether a container is a permutation of another. 
template -bool c_is_permutation(const C1& c1, const C2& c2) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_is_permutation(const C1& c1, + const C2& c2) { return std::is_permutation(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -406,7 +427,8 @@ bool c_is_permutation(const C1& c1, const C2& c2) { // Overload of c_is_permutation() for using a predicate evaluation other than // `==` as the function's test condition. template -bool c_is_permutation(const C1& c1, const C2& c2, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_is_permutation( + const C1& c1, const C2& c2, BinaryPredicate&& pred) { return std::is_permutation(container_algorithm_internal::c_begin(c1), container_algorithm_internal::c_end(c1), container_algorithm_internal::c_begin(c2), @@ -419,8 +441,9 @@ bool c_is_permutation(const C1& c1, const C2& c2, BinaryPredicate&& pred) { // Container-based version of the `std::search()` function to search // a container for a subsequence. template -container_algorithm_internal::ContainerIter c_search( - Sequence1& sequence, Sequence2& subsequence) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_search(Sequence1& sequence, Sequence2& subsequence) { return std::search(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), container_algorithm_internal::c_begin(subsequence), @@ -430,8 +453,10 @@ container_algorithm_internal::ContainerIter c_search( // Overload of c_search() for using a predicate evaluation other than // `==` as the function's test condition. 
template -container_algorithm_internal::ContainerIter c_search( - Sequence1& sequence, Sequence2& subsequence, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_search(Sequence1& sequence, Sequence2& subsequence, + BinaryPredicate&& pred) { return std::search(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), container_algorithm_internal::c_begin(subsequence), @@ -444,7 +469,8 @@ container_algorithm_internal::ContainerIter c_search( // Container-based version of the `std::ranges::contains_subrange()` // C++23 function to search a container for a subsequence. template -bool c_contains_subrange(Sequence1& sequence, Sequence2& subsequence) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_contains_subrange( + Sequence1& sequence, Sequence2& subsequence) { return absl::c_search(sequence, subsequence) != container_algorithm_internal::c_end(sequence); } @@ -452,8 +478,8 @@ bool c_contains_subrange(Sequence1& sequence, Sequence2& subsequence) { // Overload of c_contains_subrange() for using a predicate evaluation other than // `==` as the function's test condition. template -bool c_contains_subrange(Sequence1& sequence, Sequence2& subsequence, - BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 bool c_contains_subrange( + Sequence1& sequence, Sequence2& subsequence, BinaryPredicate&& pred) { return absl::c_search(sequence, subsequence, std::forward(pred)) != container_algorithm_internal::c_end(sequence); @@ -464,8 +490,9 @@ bool c_contains_subrange(Sequence1& sequence, Sequence2& subsequence, // Container-based version of the `std::search_n()` function to // search a container for the first sequence of N elements. 
template -container_algorithm_internal::ContainerIter c_search_n( - Sequence& sequence, Size count, T&& value) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_search_n(Sequence& sequence, Size count, T&& value) { return std::search_n(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), count, std::forward(value)); @@ -475,8 +502,10 @@ container_algorithm_internal::ContainerIter c_search_n( // `==` as the function's test condition. template -container_algorithm_internal::ContainerIter c_search_n( - Sequence& sequence, Size count, T&& value, BinaryPredicate&& pred) { +ABSL_INTERNAL_CONSTEXPR_SINCE_CXX20 + container_algorithm_internal::ContainerIter + c_search_n(Sequence& sequence, Size count, T&& value, + BinaryPredicate&& pred) { return std::search_n(container_algorithm_internal::c_begin(sequence), container_algorithm_internal::c_end(sequence), count, std::forward(value), diff --git a/third_party/abseil-cpp/absl/algorithm/container_test.cc b/third_party/abseil-cpp/absl/algorithm/container_test.cc index 50122249..fcb1cf51 100644 --- a/third_party/abseil-cpp/absl/algorithm/container_test.cc +++ b/third_party/abseil-cpp/absl/algorithm/container_test.cc @@ -1164,6 +1164,7 @@ TEST(MutatingTest, PermutationOperations) { #if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \ ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L + TEST(ConstexprTest, Distance) { // Works at compile time with constexpr containers. 
static_assert(absl::c_distance(std::array()) == 3); @@ -1203,8 +1204,216 @@ TEST(ConstexprTest, MinMaxElementWithPredicate) { static_assert(*kMinMaxPair.first == 3); static_assert(*kMinMaxPair.second == 1); } - #endif // defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && // ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L +#if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \ + ABSL_INTERNAL_CPLUSPLUS_LANG >= 202002L + +TEST(ConstexprTest, LinearSearch) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_linear_search(kArray, 3)); + static_assert(!absl::c_linear_search(kArray, 4)); +} + +TEST(ConstexprTest, AllOf) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(!absl::c_all_of(kArray, [](int x) { return x > 1; })); + static_assert(absl::c_all_of(kArray, [](int x) { return x > 0; })); +} + +TEST(ConstexprTest, AnyOf) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_any_of(kArray, [](int x) { return x > 2; })); + static_assert(!absl::c_any_of(kArray, [](int x) { return x > 5; })); +} + +TEST(ConstexprTest, NoneOf) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(!absl::c_none_of(kArray, [](int x) { return x > 2; })); + static_assert(absl::c_none_of(kArray, [](int x) { return x > 5; })); +} + +TEST(ConstexprTest, ForEach) { + static constexpr std::array kArray = [] { + std::array array = {1, 2, 3}; + absl::c_for_each(array, [](int& x) { x += 1; }); + return array; + }(); + static_assert(kArray == std::array{2, 3, 4}); +} + +TEST(ConstexprTest, Find) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_find(kArray, 1) == kArray.begin()); + static_assert(absl::c_find(kArray, 4) == kArray.end()); +} + +TEST(ConstexprTest, Contains) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_contains(kArray, 1)); + static_assert(!absl::c_contains(kArray, 4)); +} + +TEST(ConstexprTest, FindIf) { + static constexpr std::array kArray = {1, 2, 3}; + 
static_assert(absl::c_find_if(kArray, [](int x) { return x > 2; }) == + kArray.begin() + 2); + static_assert(absl::c_find_if(kArray, [](int x) { return x > 5; }) == + kArray.end()); +} + +TEST(ConstexprTest, FindIfNot) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_find_if_not(kArray, [](int x) { return x > 1; }) == + kArray.begin()); + static_assert(absl::c_find_if_not(kArray, [](int x) { return x > 0; }) == + kArray.end()); +} + +TEST(ConstexprTest, FindEnd) { + static constexpr std::array kHaystack = {1, 2, 3, 2, 3}; + static constexpr std::array kNeedle = {2, 3}; + static_assert(absl::c_find_end(kHaystack, kNeedle) == kHaystack.begin() + 3); +} + +TEST(ConstexprTest, FindFirstOf) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_find_first_of(kArray, kArray) == kArray.begin()); +} + +TEST(ConstexprTest, AdjacentFind) { + static constexpr std::array kArray = {1, 2, 2, 3}; + static_assert(absl::c_adjacent_find(kArray) == kArray.begin() + 1); +} + +TEST(ConstexprTest, AdjacentFindWithPredicate) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_adjacent_find(kArray, std::less()) == + kArray.begin()); +} + +TEST(ConstexprTest, Count) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_count(kArray, 1) == 1); + static_assert(absl::c_count(kArray, 2) == 1); + static_assert(absl::c_count(kArray, 3) == 1); + static_assert(absl::c_count(kArray, 4) == 0); +} + +TEST(ConstexprTest, CountIf) { + static constexpr std::array kArray = {1, 2, 3}; + static_assert(absl::c_count_if(kArray, [](int x) { return x > 0; }) == 3); + static_assert(absl::c_count_if(kArray, [](int x) { return x > 1; }) == 2); +} + +TEST(ConstexprTest, Mismatch) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_mismatch(kArray1, kArray2) == + std::pair{kArray1.end(), 
kArray2.end()}); + static_assert(absl::c_mismatch(kArray1, kArray3) == + std::pair{kArray1.begin(), kArray3.begin()}); +} + +TEST(ConstexprTest, MismatchWithPredicate) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_mismatch(kArray1, kArray2, std::not_equal_to()) == + std::pair{kArray1.begin(), kArray2.begin()}); + static_assert(absl::c_mismatch(kArray1, kArray3, std::not_equal_to()) == + std::pair{kArray1.end(), kArray3.end()}); +} + +TEST(ConstexprTest, Equal) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_equal(kArray1, kArray2)); + static_assert(!absl::c_equal(kArray1, kArray3)); +} + +TEST(ConstexprTest, EqualWithPredicate) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(!absl::c_equal(kArray1, kArray2, std::not_equal_to())); + static_assert(absl::c_equal(kArray1, kArray3, std::not_equal_to())); +} + +TEST(ConstexprTest, IsPermutation) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {3, 2, 1}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_is_permutation(kArray1, kArray2)); + static_assert(!absl::c_is_permutation(kArray1, kArray3)); +} + +TEST(ConstexprTest, IsPermutationWithPredicate) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {3, 2, 1}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_is_permutation(kArray1, kArray2, std::equal_to())); + static_assert( + !absl::c_is_permutation(kArray1, kArray3, std::equal_to())); +} + +TEST(ConstexprTest, Search) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array 
kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_search(kArray1, kArray2) == kArray1.begin()); + static_assert(absl::c_search(kArray1, kArray3) == kArray1.end()); +} + +TEST(ConstexprTest, SearchWithPredicate) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_search(kArray1, kArray2, std::not_equal_to()) == + kArray1.end()); + static_assert(absl::c_search(kArray1, kArray3, std::not_equal_to()) == + kArray1.begin()); +} + +TEST(ConstexprTest, ContainsSubrange) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert(absl::c_contains_subrange(kArray1, kArray2)); + static_assert(!absl::c_contains_subrange(kArray1, kArray3)); +} + +TEST(ConstexprTest, ContainsSubrangeWithPredicate) { + static constexpr std::array kArray1 = {1, 2, 3}; + static constexpr std::array kArray2 = {1, 2, 3}; + static constexpr std::array kArray3 = {2, 3, 4}; + static_assert( + !absl::c_contains_subrange(kArray1, kArray2, std::not_equal_to<>())); + static_assert( + absl::c_contains_subrange(kArray1, kArray3, std::not_equal_to<>())); +} + +TEST(ConstexprTest, SearchN) { + static constexpr std::array kArray = {1, 2, 2, 3}; + static_assert(absl::c_search_n(kArray, 1, 1) == kArray.begin()); + static_assert(absl::c_search_n(kArray, 2, 2) == kArray.begin() + 1); + static_assert(absl::c_search_n(kArray, 1, 4) == kArray.end()); +} + +TEST(ConstexprTest, SearchNWithPredicate) { + static constexpr std::array kArray = {1, 2, 2, 3}; + static_assert(absl::c_search_n(kArray, 1, 1, std::not_equal_to()) == + kArray.begin() + 1); + static_assert(absl::c_search_n(kArray, 2, 2, std::not_equal_to()) == + kArray.end()); + static_assert(absl::c_search_n(kArray, 1, 4, std::not_equal_to()) == + kArray.begin()); +} + +#endif // 
defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && + // ABSL_INTERNAL_CPLUSPLUS_LANG >= 202002L + } // namespace diff --git a/third_party/abseil-cpp/absl/base/BUILD.bazel b/third_party/abseil-cpp/absl/base/BUILD.bazel index 96503c90..fa3719dc 100644 --- a/third_party/abseil-cpp/absl/base/BUILD.bazel +++ b/third_party/abseil-cpp/absl/base/BUILD.bazel @@ -929,3 +929,44 @@ cc_test( "@com_google_googletest//:gtest_main", ], ) + +cc_library( + name = "tracing_internal", + srcs = ["internal/tracing.cc"], + hdrs = ["internal/tracing.h"], + copts = ABSL_DEFAULT_COPTS, + linkopts = ABSL_DEFAULT_LINKOPTS, + visibility = [ + "//absl:__subpackages__", + ], + deps = [ + "//absl/base:config", + "//absl/base:core_headers", + ], +) + +cc_test( + name = "tracing_internal_weak_test", + srcs = ["internal/tracing_weak_test.cc"], + copts = ABSL_TEST_COPTS, + linkopts = ABSL_DEFAULT_LINKOPTS, + deps = [ + ":tracing_internal", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) + +cc_test( + name = "tracing_internal_strong_test", + srcs = ["internal/tracing_strong_test.cc"], + copts = ABSL_TEST_COPTS, + linkopts = ABSL_DEFAULT_LINKOPTS, + deps = [ + ":config", + ":core_headers", + ":tracing_internal", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) diff --git a/third_party/abseil-cpp/absl/base/CMakeLists.txt b/third_party/abseil-cpp/absl/base/CMakeLists.txt index 97994fc6..eed0afc3 100644 --- a/third_party/abseil-cpp/absl/base/CMakeLists.txt +++ b/third_party/abseil-cpp/absl/base/CMakeLists.txt @@ -769,3 +769,42 @@ absl_cc_test( absl::poison GTest::gtest_main ) + +absl_cc_library( + NAME + tracing_internal + HDRS + "internal/tracing.h" + SRCS + "internal/tracing.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base +) + +absl_cc_test( + NAME + tracing_internal_weak_test + SRCS + "internal/tracing_weak_test.cc" + COPTS + ${ABSL_TEST_COPTS} + DEPS + absl::base + absl::tracing_internal + GTest::gtest_main +) + +absl_cc_test( + 
NAME + tracing_internal_strong_test + SRCS + "internal/tracing_strong_test.cc" + COPTS + ${ABSL_TEST_COPTS} + DEPS + absl::base + absl::tracing_internal + GTest::gtest_main +) diff --git a/third_party/abseil-cpp/absl/base/attributes.h b/third_party/abseil-cpp/absl/base/attributes.h index 5ea5ee3e..c5f35bec 100644 --- a/third_party/abseil-cpp/absl/base/attributes.h +++ b/third_party/abseil-cpp/absl/base/attributes.h @@ -133,12 +133,14 @@ // Tags a function as weak for the purposes of compilation and linking. // Weak attributes did not work properly in LLVM's Windows backend before // 9.0.0, so disable them there. See https://bugs.llvm.org/show_bug.cgi?id=37598 -// for further information. +// for further information. Weak attributes do not work across DLL boundary. // The MinGW compiler doesn't complain about the weak attribute until the link // step, presumably because Windows doesn't use ELF binaries. -#if (ABSL_HAVE_ATTRIBUTE(weak) || \ - (defined(__GNUC__) && !defined(__clang__))) && \ - (!defined(_WIN32) || (defined(__clang__) && __clang_major__ >= 9)) && \ +#if (ABSL_HAVE_ATTRIBUTE(weak) || \ + (defined(__GNUC__) && !defined(__clang__))) && \ + (!defined(_WIN32) || \ + (defined(__clang__) && __clang_major__ >= 9 && \ + !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL))) && \ !defined(__MINGW32__) #undef ABSL_ATTRIBUTE_WEAK #define ABSL_ATTRIBUTE_WEAK __attribute__((weak)) diff --git a/third_party/abseil-cpp/absl/base/config.h b/third_party/abseil-cpp/absl/base/config.h index 97c9a22a..ab1e9860 100644 --- a/third_party/abseil-cpp/absl/base/config.h +++ b/third_party/abseil-cpp/absl/base/config.h @@ -926,7 +926,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || // https://llvm.org/docs/CompileCudaWithLLVM.html#detecting-clang-vs-nvcc-from-code #ifdef ABSL_INTERNAL_HAVE_ARM_NEON #error ABSL_INTERNAL_HAVE_ARM_NEON cannot be directly set -#elif defined(__ARM_NEON) && !defined(__CUDA_ARCH__) +#elif defined(__ARM_NEON) && !(defined(__NVCC__) && 
defined(__CUDACC__)) #define ABSL_INTERNAL_HAVE_ARM_NEON 1 #endif diff --git a/third_party/abseil-cpp/absl/base/internal/poison.cc b/third_party/abseil-cpp/absl/base/internal/poison.cc index 9afa0687..b33d4c2d 100644 --- a/third_party/abseil-cpp/absl/base/internal/poison.cc +++ b/third_party/abseil-cpp/absl/base/internal/poison.cc @@ -29,7 +29,9 @@ #include #elif defined(ABSL_HAVE_MMAP) #include -#elif defined(_WIN32) +#endif + +#if defined(_WIN32) #include #endif diff --git a/third_party/abseil-cpp/absl/base/internal/sysinfo.cc b/third_party/abseil-cpp/absl/base/internal/sysinfo.cc index 79eaba3e..1937db30 100644 --- a/third_party/abseil-cpp/absl/base/internal/sysinfo.cc +++ b/third_party/abseil-cpp/absl/base/internal/sysinfo.cc @@ -46,6 +46,10 @@ #include #endif +#if defined(__Fuchsia__) +#include +#endif + #include #include @@ -461,6 +465,16 @@ pid_t GetTID() { return reinterpret_cast(thread); } +#elif defined(__Fuchsia__) + +pid_t GetTID() { + // Use our thread handle as the TID, which should be unique within this + // process (but may not be globally unique). The handle value was chosen over + // a kernel object ID (KOID) because zx_handle_t (32-bits) can be cast to a + // pid_t type without loss of precision, but a zx_koid_t (64-bits) cannot. + return static_cast(zx_thread_self()); +} + #else // Fallback implementation of `GetTID` using `pthread_self`. diff --git a/third_party/abseil-cpp/absl/base/internal/tracing.cc b/third_party/abseil-cpp/absl/base/internal/tracing.cc new file mode 100644 index 00000000..d304e6a2 --- /dev/null +++ b/third_party/abseil-cpp/absl/base/internal/tracing.cc @@ -0,0 +1,39 @@ +// Copyright 2024 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/base/internal/tracing.h" + +#include "absl/base/attributes.h" +#include "absl/base/config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +extern "C" { + +ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)( + const void*, ObjectKind) {} +ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)( + const void*, ObjectKind) {} +ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)( + const void*, ObjectKind) {} +ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceObserved)( + const void*, ObjectKind) {} + +} // extern "C" + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/third_party/abseil-cpp/absl/base/internal/tracing.h b/third_party/abseil-cpp/absl/base/internal/tracing.h new file mode 100644 index 00000000..e7ab7758 --- /dev/null +++ b/third_party/abseil-cpp/absl/base/internal/tracing.h @@ -0,0 +1,81 @@ +// Copyright 2024 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef ABSL_BASE_INTERNAL_TRACING_H_ +#define ABSL_BASE_INTERNAL_TRACING_H_ + +#include "absl/base/config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +// Well known Abseil object types that have causality. +enum class ObjectKind { kUnknown, kBlockingCounter, kNotification }; + +// `TraceWait` and `TraceContinue` record the start and end of a potentially +// blocking wait operation on `object`. `object` typically represents a higher +// level synchronization object such as `absl::Notification`. +void TraceWait(const void* object, ObjectKind kind); +void TraceContinue(const void* object, ObjectKind kind); + +// `TraceSignal` records a signal on `object`. +void TraceSignal(const void* object, ObjectKind kind); + +// `TraceObserved` records the non-blocking observation of a signaled object. +void TraceObserved(const void* object, ObjectKind kind); + +// --------------------------------------------------------------------------- +// Weak implementation detail: +// +// We define the weak API as extern "C": in some build configurations we pass +// `--detect-odr-violations` to the gold linker. This causes it to flag weak +// symbol overrides as ODR violations. Because ODR only applies to C++ and not +// C, `--detect-odr-violations` ignores symbols not mangled with C++ names. +// By changing our extension points to be extern "C", we dodge this check. 
+// --------------------------------------------------------------------------- +extern "C" { + + void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)(const void* object, + ObjectKind kind); + void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)(const void* object, + ObjectKind kind); + void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)(const void* object, + ObjectKind kind); + void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceObserved)(const void* object, + ObjectKind kind); + +} // extern "C" + +inline void TraceWait(const void* object, ObjectKind kind) { + ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)(object, kind); +} + +inline void TraceContinue(const void* object, ObjectKind kind) { + ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)(object, kind); +} + +inline void TraceSignal(const void* object, ObjectKind kind) { + ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)(object, kind); +} + +inline void TraceObserved(const void* object, ObjectKind kind) { + ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceObserved)(object, kind); +} + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_BASE_INTERNAL_TRACING_H_ diff --git a/third_party/abseil-cpp/absl/base/internal/tracing_strong_test.cc b/third_party/abseil-cpp/absl/base/internal/tracing_strong_test.cc new file mode 100644 index 00000000..979f1c57 --- /dev/null +++ b/third_party/abseil-cpp/absl/base/internal/tracing_strong_test.cc @@ -0,0 +1,117 @@ +// Copyright 2024 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +#include +#include + +#include "gmock/gmock.h" +#include "gtest/gtest.h" +#include "absl/base/attributes.h" +#include "absl/base/config.h" +#include "absl/base/internal/tracing.h" + +#if ABSL_HAVE_ATTRIBUTE_WEAK + +namespace { + +using ::testing::ElementsAre; + +using ::absl::base_internal::ObjectKind; + +enum Function { kWait, kContinue, kSignal, kObserved }; + +using Record = std::tuple; + +thread_local std::vector* tls_records = nullptr; + +} // namespace + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +// Strong extern "C" implementation. +extern "C" { + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)(const void* object, + ObjectKind kind) { + if (tls_records != nullptr) { + tls_records->push_back({kWait, object, kind}); + } +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)(const void* object, + ObjectKind kind) { + if (tls_records != nullptr) { + tls_records->push_back({kContinue, object, kind}); + } +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)(const void* object, + ObjectKind kind) { + if (tls_records != nullptr) { + tls_records->push_back({kSignal, object, kind}); + } +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceObserved)(const void* object, + ObjectKind kind) { + if (tls_records != nullptr) { + tls_records->push_back({kObserved, object, kind}); + } +} + +} // extern "C" + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl + +namespace { + +TEST(TracingInternal, InvokesStrongFunctionWithNullptr) { + std::vector records; + tls_records = &records; + auto kind = absl::base_internal::ObjectKind::kUnknown; + absl::base_internal::TraceWait(nullptr, kind); + absl::base_internal::TraceContinue(nullptr, kind); + absl::base_internal::TraceSignal(nullptr, kind); + absl::base_internal::TraceObserved(nullptr, kind); + tls_records = nullptr; + + EXPECT_THAT(records, 
ElementsAre(Record{kWait, nullptr, kind}, + Record{kContinue, nullptr, kind}, + Record{kSignal, nullptr, kind}, + Record{kObserved, nullptr, kind})); +} + +TEST(TracingInternal, InvokesStrongFunctionWithObjectAddress) { + int object = 0; + std::vector records; + tls_records = &records; + auto kind = absl::base_internal::ObjectKind::kUnknown; + absl::base_internal::TraceWait(&object, kind); + absl::base_internal::TraceContinue(&object, kind); + absl::base_internal::TraceSignal(&object, kind); + absl::base_internal::TraceObserved(&object, kind); + tls_records = nullptr; + + EXPECT_THAT(records, ElementsAre(Record{kWait, &object, kind}, + Record{kContinue, &object, kind}, + Record{kSignal, &object, kind}, + Record{kObserved, &object, kind})); +} + +} // namespace + +#endif // ABSL_HAVE_ATTRIBUTE_WEAK diff --git a/third_party/abseil-cpp/absl/base/internal/tracing_weak_test.cc b/third_party/abseil-cpp/absl/base/internal/tracing_weak_test.cc new file mode 100644 index 00000000..6d7553fd --- /dev/null +++ b/third_party/abseil-cpp/absl/base/internal/tracing_weak_test.cc @@ -0,0 +1,34 @@ +// Copyright 2024 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "gtest/gtest.h" +#include "absl/base/internal/tracing.h" + +namespace { + +TEST(TracingInternal, HasDefaultImplementation) { + auto kind = absl::base_internal::ObjectKind::kUnknown; + absl::base_internal::TraceWait(nullptr, kind); + absl::base_internal::TraceContinue(nullptr, kind); + absl::base_internal::TraceSignal(nullptr, kind); + absl::base_internal::TraceObserved(nullptr, kind); + + int object = 0; + absl::base_internal::TraceWait(&object, kind); + absl::base_internal::TraceContinue(&object, kind); + absl::base_internal::TraceSignal(&object, kind); + absl::base_internal::TraceObserved(&object, kind); +} + +} // namespace diff --git a/third_party/abseil-cpp/absl/container/BUILD.bazel b/third_party/abseil-cpp/absl/container/BUILD.bazel index b00c30fd..ef71c904 100644 --- a/third_party/abseil-cpp/absl/container/BUILD.bazel +++ b/third_party/abseil-cpp/absl/container/BUILD.bazel @@ -732,6 +732,7 @@ cc_test( "//absl/memory", "//absl/meta:type_traits", "//absl/strings", + "//absl/types:optional", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", ], diff --git a/third_party/abseil-cpp/absl/container/CMakeLists.txt b/third_party/abseil-cpp/absl/container/CMakeLists.txt index 25831d5f..18a511ee 100644 --- a/third_party/abseil-cpp/absl/container/CMakeLists.txt +++ b/third_party/abseil-cpp/absl/container/CMakeLists.txt @@ -791,6 +791,7 @@ absl_cc_test( absl::log absl::memory absl::node_hash_set + absl::optional absl::prefetch absl::raw_hash_set absl::strings diff --git a/third_party/abseil-cpp/absl/container/flat_hash_map.h b/third_party/abseil-cpp/absl/container/flat_hash_map.h index ebd9ed67..8c939652 100644 --- a/third_party/abseil-cpp/absl/container/flat_hash_map.h +++ b/third_party/abseil-cpp/absl/container/flat_hash_map.h @@ -426,8 +426,7 @@ class ABSL_INTERNAL_ATTRIBUTE_OWNER flat_hash_map // flat_hash_map::swap(flat_hash_map& other) // // Exchanges the contents of this `flat_hash_map` with those of the `other` - // flat hash 
map, avoiding invocation of any move, copy, or swap operations on - // individual elements. + // flat hash map. // // All iterators and references on the `flat_hash_map` remain valid, excepting // for the past-the-end iterator, which is invalidated. @@ -574,6 +573,21 @@ typename flat_hash_map::size_type erase_if( return container_internal::EraseIf(pred, &c); } +// swap(flat_hash_map<>, flat_hash_map<>) +// +// Swaps the contents of two `flat_hash_map` containers. +// +// NOTE: we need to define this function template in order for +// `flat_hash_set::swap` to be called instead of `std::swap`. Even though we +// have `swap(raw_hash_set&, raw_hash_set&)` defined, that function requires a +// derived-to-base conversion, whereas `std::swap` is a function template so +// `std::swap` will be preferred by compiler. +template +void swap(flat_hash_map& x, + flat_hash_map& y) noexcept(noexcept(x.swap(y))) { + x.swap(y); +} + namespace container_internal { // c_for_each_fast(flat_hash_map<>, Function) diff --git a/third_party/abseil-cpp/absl/container/flat_hash_set.h b/third_party/abseil-cpp/absl/container/flat_hash_set.h index a3e36e05..dd2e8109 100644 --- a/third_party/abseil-cpp/absl/container/flat_hash_set.h +++ b/third_party/abseil-cpp/absl/container/flat_hash_set.h @@ -360,8 +360,7 @@ class ABSL_INTERNAL_ATTRIBUTE_OWNER flat_hash_set // flat_hash_set::swap(flat_hash_set& other) // // Exchanges the contents of this `flat_hash_set` with those of the `other` - // flat hash set, avoiding invocation of any move, copy, or swap operations on - // individual elements. + // flat hash set. // // All iterators and references on the `flat_hash_set` remain valid, excepting // for the past-the-end iterator, which is invalidated. @@ -478,6 +477,21 @@ typename flat_hash_set::size_type erase_if( return container_internal::EraseIf(pred, &c); } +// swap(flat_hash_set<>, flat_hash_set<>) +// +// Swaps the contents of two `flat_hash_set` containers. 
+// +// NOTE: we need to define this function template in order for +// `flat_hash_set::swap` to be called instead of `std::swap`. Even though we +// have `swap(raw_hash_set&, raw_hash_set&)` defined, that function requires a +// derived-to-base conversion, whereas `std::swap` is a function template so +// `std::swap` will be preferred by compiler. +template +void swap(flat_hash_set& x, + flat_hash_set& y) noexcept(noexcept(x.swap(y))) { + return x.swap(y); +} + namespace container_internal { // c_for_each_fast(flat_hash_set<>, Function) diff --git a/third_party/abseil-cpp/absl/container/internal/container_memory.h b/third_party/abseil-cpp/absl/container/internal/container_memory.h index ba8e08a2..e7031797 100644 --- a/third_party/abseil-cpp/absl/container/internal/container_memory.h +++ b/third_party/abseil-cpp/absl/container/internal/container_memory.h @@ -17,6 +17,7 @@ #include #include +#include #include #include #include diff --git a/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h b/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h index d4fe8f5c..9d103021 100644 --- a/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h +++ b/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h @@ -536,6 +536,14 @@ static_assert(ctrl_t::kDeleted == static_cast(-2), // See definition comment for why this is size 32. ABSL_DLL extern const ctrl_t kEmptyGroup[32]; +// We use these sentinel capacity values in debug mode to indicate different +// classes of bugs. +enum InvalidCapacity : size_t { + kAboveMaxValidCapacity = ~size_t{} - 100, + // Used for reentrancy assertions. + kInvalidReentrance, +}; + // Returns a pointer to a control byte group that can be used by empty tables. inline ctrl_t* EmptyGroup() { // Const must be cast away here; no uses of this function will actually write @@ -1376,7 +1384,8 @@ class CommonFields : public CommonFieldsGenerationInfo { // The total number of available slots. 
size_t capacity() const { return capacity_; } void set_capacity(size_t c) { - assert(c == 0 || IsValidCapacity(c)); + // We allow setting above the max valid capacity for debugging purposes. + assert(c == 0 || IsValidCapacity(c) || c > kAboveMaxValidCapacity); capacity_ = c; } @@ -1444,6 +1453,20 @@ class CommonFields : public CommonFieldsGenerationInfo { std::count(control(), control() + capacity(), ctrl_t::kDeleted)); } + // Helper to enable sanitizer mode validation to protect against reentrant + // calls during element constructor/destructor. + template + void RunWithReentrancyGuard(F f) { +#ifdef NDEBUG + f(); + return; +#endif + const size_t cap = capacity(); + set_capacity(kInvalidReentrance); + f(); + set_capacity(cap); + } + private: // We store the has_infoz bit in the lowest bit of size_. static constexpr size_t HasInfozShift() { return 1; } @@ -2874,6 +2897,7 @@ class raw_hash_set { size_t max_size() const { return (std::numeric_limits::max)(); } ABSL_ATTRIBUTE_REINITIALIZES void clear() { + AssertValidCapacity(); // Iterating over this container is O(bucket_count()). When bucket_count() // is much greater than size(), iteration becomes prohibitively expensive. // For clear() it is more important to reuse the allocated array when the @@ -3127,6 +3151,7 @@ class raw_hash_set { // This overload is necessary because otherwise erase(const K&) would be // a better match if non-const iterator is passed as an argument. void erase(iterator it) { + AssertValidCapacity(); AssertIsFull(it.control(), it.generation(), it.generation_ptr(), "erase()"); destroy(it.slot()); if (is_soo()) { @@ -3138,6 +3163,7 @@ class raw_hash_set { iterator erase(const_iterator first, const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND { + AssertValidCapacity(); // We check for empty first because ClearBackingArray requires that // capacity() > 0 as a precondition. 
if (empty()) return end(); @@ -3193,6 +3219,7 @@ class raw_hash_set { } node_type extract(const_iterator position) { + AssertValidCapacity(); AssertIsFull(position.control(), position.inner_.generation(), position.inner_.generation_ptr(), "extract()"); auto node = CommonAccess::Transfer(alloc_ref(), position.slot()); @@ -3325,13 +3352,13 @@ class raw_hash_set { template iterator find(const key_arg& key, size_t hash) ABSL_ATTRIBUTE_LIFETIME_BOUND { - AssertHashEqConsistent(key); + AssertOnFind(key); if (is_soo()) return find_soo(key); return find_non_soo(key, hash); } template iterator find(const key_arg& key) ABSL_ATTRIBUTE_LIFETIME_BOUND { - AssertHashEqConsistent(key); + AssertOnFind(key); if (is_soo()) return find_soo(key); prefetch_heap_block(); return find_non_soo(key, hash_ref()(key)); @@ -3476,16 +3503,19 @@ class raw_hash_set { slot_type&& slot; }; - // TODO(b/303305702): re-enable reentrant validation. template inline void construct(slot_type* slot, Args&&... args) { - PolicyTraits::construct(&alloc_ref(), slot, std::forward(args)...); + common().RunWithReentrancyGuard([&] { + PolicyTraits::construct(&alloc_ref(), slot, std::forward(args)...); + }); } inline void destroy(slot_type* slot) { - PolicyTraits::destroy(&alloc_ref(), slot); + common().RunWithReentrancyGuard( + [&] { PolicyTraits::destroy(&alloc_ref(), slot); }); } inline void transfer(slot_type* to, slot_type* from) { - PolicyTraits::transfer(&alloc_ref(), to, from); + common().RunWithReentrancyGuard( + [&] { PolicyTraits::transfer(&alloc_ref(), to, from); }); } // TODO(b/289225379): consider having a helper class that has the impls for @@ -3690,15 +3720,23 @@ class raw_hash_set { static slot_type* to_slot(void* buf) { return static_cast(buf); } // Requires that lhs does not have a full SOO slot. 
- static void move_common(bool that_is_full_soo, allocator_type& rhs_alloc, + static void move_common(bool rhs_is_full_soo, allocator_type& rhs_alloc, CommonFields& lhs, CommonFields&& rhs) { - if (PolicyTraits::transfer_uses_memcpy() || !that_is_full_soo) { + if (PolicyTraits::transfer_uses_memcpy() || !rhs_is_full_soo) { lhs = std::move(rhs); } else { lhs.move_non_heap_or_soo_fields(rhs); - // TODO(b/303305702): add reentrancy guard. - PolicyTraits::transfer(&rhs_alloc, to_slot(lhs.soo_data()), - to_slot(rhs.soo_data())); +#ifndef NDEBUG + const size_t rhs_capacity = rhs.capacity(); + rhs.set_capacity(kInvalidReentrance); +#endif + lhs.RunWithReentrancyGuard([&] { + PolicyTraits::transfer(&rhs_alloc, to_slot(lhs.soo_data()), + to_slot(rhs.soo_data())); + }); +#ifndef NDEBUG + rhs.set_capacity(rhs_capacity); +#endif } } @@ -3831,11 +3869,28 @@ class raw_hash_set { } protected: + // Asserts for correctness that we run on find/find_or_prepare_insert. + template + void AssertOnFind(ABSL_ATTRIBUTE_UNUSED const K& key) { +#ifdef NDEBUG + return; +#endif + AssertHashEqConsistent(key); + AssertValidCapacity(); + } + + // Asserts that the capacity is not a sentinel invalid value. + // TODO(b/296061262): also add asserts for moved-from and destroyed states. + void AssertValidCapacity() const { + assert(capacity() != kInvalidReentrance && + "reentrant container access during element construction/destruction " + "is not allowed."); + } + // Asserts that hash and equal functors provided by the user are consistent, // meaning that `eq(k1, k2)` implies `hash(k1)==hash(k2)`. template - void AssertHashEqConsistent(ABSL_ATTRIBUTE_UNUSED const K& key) { -#ifndef NDEBUG + void AssertHashEqConsistent(const K& key) { if (empty()) return; const size_t hash_of_arg = hash_ref()(key); @@ -3852,13 +3907,13 @@ class raw_hash_set { // In this case, we're going to crash. Do a couple of other checks for // idempotence issues. 
Recalculating hash/eq here is also convenient for // debugging with gdb/lldb. - const size_t once_more_hash_arg = hash_ref()(key); + ABSL_ATTRIBUTE_UNUSED const size_t once_more_hash_arg = hash_ref()(key); assert(hash_of_arg == once_more_hash_arg && "hash is not idempotent."); - const size_t once_more_hash_slot = + ABSL_ATTRIBUTE_UNUSED const size_t once_more_hash_slot = PolicyTraits::apply(HashElement{hash_ref()}, element); assert(hash_of_slot == once_more_hash_slot && "hash is not idempotent."); - const bool once_more_eq = + ABSL_ATTRIBUTE_UNUSED const bool once_more_eq = PolicyTraits::apply(EqualElement{key, eq_ref()}, element); assert(is_key_equal == once_more_eq && "equality is not idempotent."); } @@ -3874,7 +3929,6 @@ class raw_hash_set { // We only do validation for small tables so that it's constant time. if (capacity() > 16) return; IterateOverFullSlots(common(), slot_array(), assert_consistent); -#endif } // Attempts to find `key` in the table; if it isn't found, returns an iterator @@ -3882,7 +3936,7 @@ class raw_hash_set { // `key`'s H2. Returns a bool indicating whether an insertion can take place. 
template std::pair find_or_prepare_insert(const K& key) { - AssertHashEqConsistent(key); + AssertOnFind(key); if (is_soo()) return find_or_prepare_insert_soo(key); return find_or_prepare_insert_non_soo(key); } diff --git a/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc b/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc index f1257d4b..923ea52b 100644 --- a/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc +++ b/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc @@ -62,6 +62,7 @@ #include "absl/meta/type_traits.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" +#include "absl/types/optional.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -3594,6 +3595,72 @@ TEST(Iterator, InconsistentHashEqFunctorsValidation) { "hash/eq functors are inconsistent."); } +struct ConstructCaller { + explicit ConstructCaller(int v) : val(v) {} + ConstructCaller(int v, absl::FunctionRef func) : val(v) { func(); } + template + friend H AbslHashValue(H h, const ConstructCaller& d) { + return H::combine(std::move(h), d.val); + } + bool operator==(const ConstructCaller& c) const { return val == c.val; } + + int val; +}; + +struct DestroyCaller { + explicit DestroyCaller(int v) : val(v) {} + DestroyCaller(int v, absl::FunctionRef func) + : val(v), destroy_func(func) {} + DestroyCaller(DestroyCaller&& that) + : val(that.val), destroy_func(std::move(that.destroy_func)) { + that.Deactivate(); + } + ~DestroyCaller() { + if (destroy_func) (*destroy_func)(); + } + void Deactivate() { destroy_func = absl::nullopt; } + + template + friend H AbslHashValue(H h, const DestroyCaller& d) { + return H::combine(std::move(h), d.val); + } + bool operator==(const DestroyCaller& d) const { return val == d.val; } + + int val; + absl::optional> destroy_func; +}; + +TEST(Table, ReentrantCallsFail) { +#ifdef NDEBUG + GTEST_SKIP() << "Reentrant checks only enabled in debug mode."; +#else + { + ValueTable t; + 
t.insert(ConstructCaller{0}); + auto erase_begin = [&] { t.erase(t.begin()); }; + EXPECT_DEATH_IF_SUPPORTED(t.emplace(1, erase_begin), ""); + } + { + ValueTable t; + t.insert(DestroyCaller{0}); + auto find_0 = [&] { t.find(DestroyCaller{0}); }; + t.insert(DestroyCaller{1, find_0}); + for (int i = 10; i < 20; ++i) t.insert(DestroyCaller{i}); + EXPECT_DEATH_IF_SUPPORTED(t.clear(), ""); + for (auto& elem : t) elem.Deactivate(); + } + { + ValueTable t; + t.insert(DestroyCaller{0}); + auto insert_1 = [&] { t.insert(DestroyCaller{1}); }; + t.insert(DestroyCaller{1, insert_1}); + for (int i = 10; i < 20; ++i) t.insert(DestroyCaller{i}); + EXPECT_DEATH_IF_SUPPORTED(t.clear(), ""); + for (auto& elem : t) elem.Deactivate(); + } +#endif +} + } // namespace } // namespace container_internal ABSL_NAMESPACE_END diff --git a/third_party/abseil-cpp/absl/container/node_hash_map.h b/third_party/abseil-cpp/absl/container/node_hash_map.h index 5615e496..d1a311fd 100644 --- a/third_party/abseil-cpp/absl/container/node_hash_map.h +++ b/third_party/abseil-cpp/absl/container/node_hash_map.h @@ -417,8 +417,7 @@ class ABSL_INTERNAL_ATTRIBUTE_OWNER node_hash_map // node_hash_map::swap(node_hash_map& other) // // Exchanges the contents of this `node_hash_map` with those of the `other` - // node hash map, avoiding invocation of any move, copy, or swap operations on - // individual elements. + // node hash map. // // All iterators and references on the `node_hash_map` remain valid, excepting // for the past-the-end iterator, which is invalidated. @@ -558,6 +557,21 @@ typename node_hash_map::size_type erase_if( return container_internal::EraseIf(pred, &c); } +// swap(node_hash_map<>, node_hash_map<>) +// +// Swaps the contents of two `node_hash_map` containers. +// +// NOTE: we need to define this function template in order for +// `flat_hash_set::swap` to be called instead of `std::swap`. 
Even though we +// have `swap(raw_hash_set&, raw_hash_set&)` defined, that function requires a +// derived-to-base conversion, whereas `std::swap` is a function template so +// `std::swap` will be preferred by compiler. +template +void swap(node_hash_map& x, + node_hash_map& y) noexcept(noexcept(x.swap(y))) { + return x.swap(y); +} + namespace container_internal { // c_for_each_fast(node_hash_map<>, Function) diff --git a/third_party/abseil-cpp/absl/container/node_hash_set.h b/third_party/abseil-cpp/absl/container/node_hash_set.h index 53435ae6..23080746 100644 --- a/third_party/abseil-cpp/absl/container/node_hash_set.h +++ b/third_party/abseil-cpp/absl/container/node_hash_set.h @@ -349,8 +349,7 @@ class ABSL_INTERNAL_ATTRIBUTE_OWNER node_hash_set // node_hash_set::swap(node_hash_set& other) // // Exchanges the contents of this `node_hash_set` with those of the `other` - // node hash set, avoiding invocation of any move, copy, or swap operations on - // individual elements. + // node hash set. // // All iterators and references on the `node_hash_set` remain valid, excepting // for the past-the-end iterator, which is invalidated. @@ -467,6 +466,21 @@ typename node_hash_set::size_type erase_if( return container_internal::EraseIf(pred, &c); } +// swap(node_hash_set<>, node_hash_set<>) +// +// Swaps the contents of two `node_hash_set` containers. +// +// NOTE: we need to define this function template in order for +// `flat_hash_set::swap` to be called instead of `std::swap`. Even though we +// have `swap(raw_hash_set&, raw_hash_set&)` defined, that function requires a +// derived-to-base conversion, whereas `std::swap` is a function template so +// `std::swap` will be preferred by compiler. 
+template +void swap(node_hash_set& x, + node_hash_set& y) noexcept(noexcept(x.swap(y))) { + return x.swap(y); +} + namespace container_internal { // c_for_each_fast(node_hash_set<>, Function) diff --git a/third_party/abseil-cpp/absl/debugging/internal/stack_consumption.h b/third_party/abseil-cpp/absl/debugging/internal/stack_consumption.h index f41b64c3..f5ba5575 100644 --- a/third_party/abseil-cpp/absl/debugging/internal/stack_consumption.h +++ b/third_party/abseil-cpp/absl/debugging/internal/stack_consumption.h @@ -24,7 +24,7 @@ // Use this feature test macro to detect its availability. #ifdef ABSL_INTERNAL_HAVE_DEBUGGING_STACK_CONSUMPTION #error ABSL_INTERNAL_HAVE_DEBUGGING_STACK_CONSUMPTION cannot be set directly -#elif !defined(__APPLE__) && !defined(_WIN32) && \ +#elif !defined(__APPLE__) && !defined(_WIN32) && !defined(__Fuchsia__) && \ (defined(__i386__) || defined(__x86_64__) || defined(__ppc__) || \ defined(__aarch64__) || defined(__riscv)) #define ABSL_INTERNAL_HAVE_DEBUGGING_STACK_CONSUMPTION 1 diff --git a/third_party/abseil-cpp/absl/debugging/leak_check.cc b/third_party/abseil-cpp/absl/debugging/leak_check.cc index fdb8798b..1e57e6af 100644 --- a/third_party/abseil-cpp/absl/debugging/leak_check.cc +++ b/third_party/abseil-cpp/absl/debugging/leak_check.cc @@ -43,7 +43,7 @@ bool LeakCheckerIsActive() { bool LeakCheckerIsActive() { return true; } #endif -bool FindAndReportLeaks() { return __lsan_do_recoverable_leak_check(); } +bool FindAndReportLeaks() { return __lsan_do_recoverable_leak_check() != 0; } void DoIgnoreLeak(const void* ptr) { __lsan_ignore_object(ptr); } void RegisterLivePointers(const void* ptr, size_t size) { __lsan_register_root_region(ptr, size); diff --git a/third_party/abseil-cpp/absl/flags/BUILD.bazel b/third_party/abseil-cpp/absl/flags/BUILD.bazel index 7a8ec7e6..af5f8d3e 100644 --- a/third_party/abseil-cpp/absl/flags/BUILD.bazel +++ b/third_party/abseil-cpp/absl/flags/BUILD.bazel @@ -498,7 +498,10 @@ cc_test( ], copts = 
ABSL_TEST_COPTS, linkopts = ABSL_DEFAULT_LINKOPTS, - tags = ["no_test_wasm"], + tags = [ + "no_test_fuchsia_x64", + "no_test_wasm", + ], deps = [ ":program_name", "//absl/strings", diff --git a/third_party/abseil-cpp/absl/log/BUILD.bazel b/third_party/abseil-cpp/absl/log/BUILD.bazel index b13cf4d4..8b6a295a 100644 --- a/third_party/abseil-cpp/absl/log/BUILD.bazel +++ b/third_party/abseil-cpp/absl/log/BUILD.bazel @@ -309,6 +309,7 @@ cc_test( cc_test( name = "absl_log_basic_test", size = "small", + timeout = "moderate", srcs = ["absl_log_basic_test.cc"], copts = ABSL_TEST_COPTS, linkopts = ABSL_DEFAULT_LINKOPTS, @@ -424,6 +425,7 @@ cc_test( cc_test( name = "log_basic_test", size = "small", + timeout = "moderate", srcs = ["log_basic_test.cc"], copts = ABSL_TEST_COPTS, linkopts = ABSL_DEFAULT_LINKOPTS, diff --git a/third_party/abseil-cpp/absl/log/internal/BUILD.bazel b/third_party/abseil-cpp/absl/log/internal/BUILD.bazel index 2dbf337a..db8464eb 100644 --- a/third_party/abseil-cpp/absl/log/internal/BUILD.bazel +++ b/third_party/abseil-cpp/absl/log/internal/BUILD.bazel @@ -427,6 +427,7 @@ cc_binary( cc_test( name = "stderr_log_sink_test", size = "small", + timeout = "moderate", srcs = ["stderr_log_sink_test.cc"], copts = ABSL_TEST_COPTS, linkopts = ABSL_DEFAULT_LINKOPTS, @@ -435,6 +436,7 @@ cc_test( "no_test:os:ios", "no_test_android", "no_test_darwin_x86_64", + "no_test_fuchsia_x64", "no_test_ios", "no_test_wasm", ], diff --git a/third_party/abseil-cpp/absl/random/BUILD.bazel b/third_party/abseil-cpp/absl/random/BUILD.bazel index f276cc08..abb93f8e 100644 --- a/third_party/abseil-cpp/absl/random/BUILD.bazel +++ b/third_party/abseil-cpp/absl/random/BUILD.bazel @@ -474,6 +474,7 @@ cc_test( cc_test( name = "mock_distributions_test", size = "small", + timeout = "moderate", srcs = ["mock_distributions_test.cc"], copts = ABSL_TEST_COPTS, linkopts = ABSL_DEFAULT_LINKOPTS, diff --git a/third_party/abseil-cpp/absl/strings/BUILD.bazel 
b/third_party/abseil-cpp/absl/strings/BUILD.bazel index 9bffae61..2cc014ed 100644 --- a/third_party/abseil-cpp/absl/strings/BUILD.bazel +++ b/third_party/abseil-cpp/absl/strings/BUILD.bazel @@ -77,7 +77,6 @@ cc_library( "escaping.h", "has_absl_stringify.h", "internal/damerau_levenshtein_distance.h", - "internal/has_absl_stringify.h", "internal/string_constant.h", "match.h", "numbers.h", diff --git a/third_party/abseil-cpp/absl/strings/CMakeLists.txt b/third_party/abseil-cpp/absl/strings/CMakeLists.txt index 4a84dbbd..3a1619e8 100644 --- a/third_party/abseil-cpp/absl/strings/CMakeLists.txt +++ b/third_party/abseil-cpp/absl/strings/CMakeLists.txt @@ -42,7 +42,6 @@ absl_cc_library( "has_absl_stringify.h" "internal/damerau_levenshtein_distance.h" "internal/string_constant.h" - "internal/has_absl_stringify.h" "match.h" "numbers.h" "str_cat.h" diff --git a/third_party/abseil-cpp/absl/strings/ascii.cc b/third_party/abseil-cpp/absl/strings/ascii.cc index 20a696a1..2af13a6d 100644 --- a/third_party/abseil-cpp/absl/strings/ascii.cc +++ b/third_party/abseil-cpp/absl/strings/ascii.cc @@ -180,7 +180,7 @@ constexpr bool AsciiInAZRange(unsigned char c) { // Force-inline so the compiler won't merge the short and long implementations. template ABSL_ATTRIBUTE_ALWAYS_INLINE inline constexpr void AsciiStrCaseFoldImpl( - absl::Nonnull p, size_t size) { + absl::Nonnull dst, absl::Nonnull src, size_t size) { // The upper- and lowercase versions of ASCII characters differ by only 1 bit. // When we need to flip the case, we can xor with this bit to achieve the // desired result. Note that the choice of 'a' and 'A' here is arbitrary. We @@ -189,9 +189,9 @@ ABSL_ATTRIBUTE_ALWAYS_INLINE inline constexpr void AsciiStrCaseFoldImpl( constexpr unsigned char kAsciiCaseBitFlip = 'a' ^ 'A'; for (size_t i = 0; i < size; ++i) { - unsigned char v = static_cast(p[i]); + unsigned char v = static_cast(src[i]); v ^= AsciiInAZRange(v) ? 
kAsciiCaseBitFlip : 0; - p[i] = static_cast(v); + dst[i] = static_cast(v); } } @@ -201,17 +201,28 @@ constexpr size_t kCaseFoldThreshold = 16; // No-inline so the compiler won't merge the short and long implementations. template ABSL_ATTRIBUTE_NOINLINE constexpr void AsciiStrCaseFoldLong( - absl::Nonnull p, size_t size) { + absl::Nonnull dst, absl::Nonnull src, size_t size) { ABSL_ASSUME(size >= kCaseFoldThreshold); - AsciiStrCaseFoldImpl(p, size); + AsciiStrCaseFoldImpl(dst, src, size); } // Splitting to short and long strings to allow vectorization decisions // to be made separately in the long and short cases. template -constexpr void AsciiStrCaseFold(absl::Nonnull p, size_t size) { - size < kCaseFoldThreshold ? AsciiStrCaseFoldImpl(p, size) - : AsciiStrCaseFoldLong(p, size); +constexpr void AsciiStrCaseFold(absl::Nonnull dst, + absl::Nonnull src, size_t size) { + size < kCaseFoldThreshold ? AsciiStrCaseFoldImpl(dst, src, size) + : AsciiStrCaseFoldLong(dst, src, size); +} + +void AsciiStrToLower(absl::Nonnull dst, absl::Nonnull src, + size_t n) { + return AsciiStrCaseFold(dst, src, n); +} + +void AsciiStrToUpper(absl::Nonnull dst, absl::Nonnull src, + size_t n) { + return AsciiStrCaseFold(dst, src, n); } static constexpr size_t ValidateAsciiCasefold() { @@ -222,8 +233,8 @@ static constexpr size_t ValidateAsciiCasefold() { for (unsigned int i = 0; i < num_chars; ++i) { uppered[i] = lowered[i] = static_cast(i); } - AsciiStrCaseFold(&lowered[0], num_chars); - AsciiStrCaseFold(&uppered[0], num_chars); + AsciiStrCaseFold(&lowered[0], &lowered[0], num_chars); + AsciiStrCaseFold(&uppered[0], &uppered[0], num_chars); for (size_t i = 0; i < num_chars; ++i) { const char ch = static_cast(i), ch_upper = ('a' <= ch && ch <= 'z' ? 
'A' + (ch - 'a') : ch), @@ -241,11 +252,13 @@ static_assert(ValidateAsciiCasefold() == 0, "error in case conversion"); } // namespace ascii_internal void AsciiStrToLower(absl::Nonnull s) { - return ascii_internal::AsciiStrCaseFold(&(*s)[0], s->size()); + char* p = &(*s)[0]; + return ascii_internal::AsciiStrCaseFold(p, p, s->size()); } void AsciiStrToUpper(absl::Nonnull s) { - return ascii_internal::AsciiStrCaseFold(&(*s)[0], s->size()); + char* p = &(*s)[0]; + return ascii_internal::AsciiStrCaseFold(p, p, s->size()); } void RemoveExtraAsciiWhitespace(absl::Nonnull str) { diff --git a/third_party/abseil-cpp/absl/strings/ascii.h b/third_party/abseil-cpp/absl/strings/ascii.h index c238f4de..c8c40e85 100644 --- a/third_party/abseil-cpp/absl/strings/ascii.h +++ b/third_party/abseil-cpp/absl/strings/ascii.h @@ -55,10 +55,12 @@ #include #include #include +#include #include "absl/base/attributes.h" #include "absl/base/config.h" #include "absl/base/nullability.h" +#include "absl/strings/internal/resize_uninitialized.h" #include "absl/strings/string_view.h" namespace absl { @@ -74,6 +76,12 @@ ABSL_DLL extern const char kToUpper[256]; // Declaration for the array of characters to lower-case characters. ABSL_DLL extern const char kToLower[256]; +void AsciiStrToLower(absl::Nonnull dst, absl::Nonnull src, + size_t n); + +void AsciiStrToUpper(absl::Nonnull dst, absl::Nonnull src, + size_t n); + } // namespace ascii_internal // ascii_isalpha() @@ -171,7 +179,18 @@ void AsciiStrToLower(absl::Nonnull s); // Creates a lowercase string from a given absl::string_view. ABSL_MUST_USE_RESULT inline std::string AsciiStrToLower(absl::string_view s) { - std::string result(s); + std::string result; + strings_internal::STLStringResizeUninitialized(&result, s.size()); + ascii_internal::AsciiStrToLower(&result[0], s.data(), s.size()); + return result; +} + +// Creates a lowercase string from a given std::string&&. +// +// (Template is used to lower priority of this overload.) 
+template +ABSL_MUST_USE_RESULT inline std::string AsciiStrToLower(std::string&& s) { + std::string result = std::move(s); absl::AsciiStrToLower(&result); return result; } @@ -189,7 +208,18 @@ void AsciiStrToUpper(absl::Nonnull s); // Creates an uppercase string from a given absl::string_view. ABSL_MUST_USE_RESULT inline std::string AsciiStrToUpper(absl::string_view s) { - std::string result(s); + std::string result; + strings_internal::STLStringResizeUninitialized(&result, s.size()); + ascii_internal::AsciiStrToUpper(&result[0], s.data(), s.size()); + return result; +} + +// Creates an uppercase string from a given std::string&&. +// +// (Template is used to lower priority of this overload.) +template +ABSL_MUST_USE_RESULT inline std::string AsciiStrToUpper(std::string&& s) { + std::string result = std::move(s); absl::AsciiStrToUpper(&result); return result; } diff --git a/third_party/abseil-cpp/absl/strings/ascii_benchmark.cc b/third_party/abseil-cpp/absl/strings/ascii_benchmark.cc index 4ae73174..0eff801a 100644 --- a/third_party/abseil-cpp/absl/strings/ascii_benchmark.cc +++ b/third_party/abseil-cpp/absl/strings/ascii_benchmark.cc @@ -129,4 +129,32 @@ BENCHMARK(BM_StrToUpper) ->RangeMultiplier(2) ->Range(64, 1 << 26); +static void BM_StrToUpperFromRvalref(benchmark::State& state) { + const size_t size = static_cast(state.range(0)); + std::string s(size, 'X'); + for (auto _ : state) { + benchmark::DoNotOptimize(s); + std::string res = absl::AsciiStrToUpper(std::string(s)); + benchmark::DoNotOptimize(res); + } +} +BENCHMARK(BM_StrToUpperFromRvalref) + ->DenseRange(0, 32) + ->RangeMultiplier(2) + ->Range(64, 1 << 26); + +static void BM_StrToLowerFromRvalref(benchmark::State& state) { + const size_t size = static_cast(state.range(0)); + std::string s(size, 'x'); + for (auto _ : state) { + benchmark::DoNotOptimize(s); + std::string res = absl::AsciiStrToLower(std::string(s)); + benchmark::DoNotOptimize(res); + } +} +BENCHMARK(BM_StrToLowerFromRvalref) + 
->DenseRange(0, 32) + ->RangeMultiplier(2) + ->Range(64, 1 << 26); + } // namespace diff --git a/third_party/abseil-cpp/absl/strings/ascii_test.cc b/third_party/abseil-cpp/absl/strings/ascii_test.cc index 8885bb15..896ffb7d 100644 --- a/third_party/abseil-cpp/absl/strings/ascii_test.cc +++ b/third_party/abseil-cpp/absl/strings/ascii_test.cc @@ -192,11 +192,13 @@ TEST(AsciiStrTo, Lower) { const absl::string_view sp(str2); const std::string long_str("ABCDEFGHIJKLMNOPQRSTUVWXYZ1!a"); std::string mutable_str("_`?@[{AMNOPQRSTUVWXYZ"); + auto fun = []() -> std::string { return "PQRSTU"; }; EXPECT_EQ("abcdef", absl::AsciiStrToLower(buf)); EXPECT_EQ("ghijkl", absl::AsciiStrToLower(str)); EXPECT_EQ("mnopqr", absl::AsciiStrToLower(sp)); EXPECT_EQ("abcdefghijklmnopqrstuvwxyz1!a", absl::AsciiStrToLower(long_str)); + EXPECT_EQ("pqrstu", absl::AsciiStrToLower(fun())); absl::AsciiStrToLower(&mutable_str); EXPECT_EQ("_`?@[{amnopqrstuvwxyz", mutable_str); @@ -213,11 +215,13 @@ TEST(AsciiStrTo, Upper) { const std::string str2("_`?@[{amnopqrstuvwxyz"); const absl::string_view sp(str2); const std::string long_str("abcdefghijklmnopqrstuvwxyz1!A"); + auto fun = []() -> std::string { return "pqrstu"; }; EXPECT_EQ("ABCDEF", absl::AsciiStrToUpper(buf)); EXPECT_EQ("GHIJKL", absl::AsciiStrToUpper(str)); EXPECT_EQ("_`?@[{AMNOPQRSTUVWXYZ", absl::AsciiStrToUpper(sp)); EXPECT_EQ("ABCDEFGHIJKLMNOPQRSTUVWXYZ1!A", absl::AsciiStrToUpper(long_str)); + EXPECT_EQ("PQRSTU", absl::AsciiStrToUpper(fun())); char mutable_buf[] = "Mutable"; std::transform(mutable_buf, mutable_buf + strlen(mutable_buf), diff --git a/third_party/abseil-cpp/absl/strings/cord.h b/third_party/abseil-cpp/absl/strings/cord.h index c68b6f10..1d8fcd37 100644 --- a/third_party/abseil-cpp/absl/strings/cord.h +++ b/third_party/abseil-cpp/absl/strings/cord.h @@ -641,7 +641,6 @@ class Cord { bool operator==(const CharIterator& other) const; bool operator!=(const CharIterator& other) const; reference operator*() const; - pointer 
operator->() const; friend Cord; @@ -772,7 +771,7 @@ class Cord { // Cord::Find() // - // Returns an iterator to the first occurrance of the substring `needle`. + // Returns an iterator to the first occurrence of the substring `needle`. // // If the substring `needle` does not occur, `Cord::char_end()` is returned. CharIterator Find(absl::string_view needle) const; @@ -1659,10 +1658,6 @@ inline Cord::CharIterator::reference Cord::CharIterator::operator*() const { return *chunk_iterator_->data(); } -inline Cord::CharIterator::pointer Cord::CharIterator::operator->() const { - return chunk_iterator_->data(); -} - inline Cord Cord::AdvanceAndRead(absl::Nonnull it, size_t n_bytes) { assert(it != nullptr); diff --git a/third_party/abseil-cpp/absl/strings/cord_test.cc b/third_party/abseil-cpp/absl/strings/cord_test.cc index eaf6d719..dfcaa004 100644 --- a/third_party/abseil-cpp/absl/strings/cord_test.cc +++ b/third_party/abseil-cpp/absl/strings/cord_test.cc @@ -2530,8 +2530,6 @@ static void VerifyCharIterator(const absl::Cord& cord) { EXPECT_EQ(*pre_iter, *post_iter); EXPECT_EQ(&*pre_iter, &*post_iter); - EXPECT_EQ(&*pre_iter, pre_iter.operator->()); - const char* character_address = &*pre_iter; absl::Cord::CharIterator copy = pre_iter; ++copy; @@ -3278,6 +3276,26 @@ TEST_P(CordTest, ChecksummedEmptyCord) { EXPECT_EQ(absl::HashOf(c3), absl::HashOf(absl::string_view())); } +// This must not be static to avoid aggressive optimizations. +ABSL_ATTRIBUTE_WEAK +size_t FalseReport(const absl::Cord& a, bool f); + +ABSL_ATTRIBUTE_NOINLINE +size_t FalseReport(const absl::Cord& a, bool f) { + absl::Cord b; + const absl::Cord& ref = f ? b : a; + // Test that sanitizers report nothing here. Without + // InlineData::Rep::annotated_this() compiler can unconditionally load + // poisoned parts, assuming that local variable is fully accessible. 
+ return ref.size(); +} + +TEST(CordSanitizerTest, SanitizesCordFalseReport) { + absl::Cord c; + for (int i = 0; i < 1000; ++i) c.Append("a"); + FalseReport(c, false); +} + TEST(CrcCordTest, ChecksummedEmptyCordEstimateMemoryUsage) { absl::Cord cord; cord.SetExpectedChecksum(0); diff --git a/third_party/abseil-cpp/absl/strings/internal/cord_internal.h b/third_party/abseil-cpp/absl/strings/internal/cord_internal.h index 9420e764..d33b09e6 100644 --- a/third_party/abseil-cpp/absl/strings/internal/cord_internal.h +++ b/third_party/abseil-cpp/absl/strings/internal/cord_internal.h @@ -259,7 +259,7 @@ struct CordRep { // on the specific layout of these fields. Notably: the non-trivial field // `refcount` being preceded by `length`, and being tailed by POD data // members only. - // # LINT.IfChange + // LINT.IfChange size_t length; RefcountAndFlags refcount; // If tag < FLAT, it represents CordRepKind and indicates the type of node. @@ -275,7 +275,7 @@ struct CordRep { // allocate room for these in the derived class, as not all compilers reuse // padding space from the base class (clang and gcc do, MSVC does not, etc) uint8_t storage[3]; - // # LINT.ThenChange(cord_rep_btree.h:copy_raw) + // LINT.ThenChange(cord_rep_btree.h:copy_raw) // Returns true if this instance's tag matches the requested type. constexpr bool IsSubstring() const { return tag == SUBSTRING; } @@ -713,35 +713,53 @@ class InlineData { GetOrNull(chars, 13), GetOrNull(chars, 14)} {} +#ifdef ABSL_INTERNAL_CORD_HAVE_SANITIZER + // Break compiler optimization for cases when value is allocated on the + // stack. Compiler assumes that the the variable is fully accessible + // regardless of our poisoning. 
+ // Missing report: https://github.com/llvm/llvm-project/issues/100640 + const Rep* self() const { + const Rep* volatile ptr = this; + return ptr; + } + Rep* self() { + Rep* volatile ptr = this; + return ptr; + } +#else + constexpr const Rep* self() const { return this; } + constexpr Rep* self() { return this; } +#endif + // Disable sanitizer as we must always be able to read `tag`. ABSL_CORD_INTERNAL_NO_SANITIZE int8_t tag() const { return reinterpret_cast(this)[0]; } - void set_tag(int8_t rhs) { reinterpret_cast(this)[0] = rhs; } + void set_tag(int8_t rhs) { reinterpret_cast(self())[0] = rhs; } - char* as_chars() { return data + 1; } - const char* as_chars() const { return data + 1; } + char* as_chars() { return self()->data + 1; } + const char* as_chars() const { return self()->data + 1; } - bool is_tree() const { return (tag() & 1) != 0; } + bool is_tree() const { return (self()->tag() & 1) != 0; } size_t inline_size() const { - ABSL_ASSERT(!is_tree()); - return static_cast(tag()) >> 1; + ABSL_ASSERT(!self()->is_tree()); + return static_cast(self()->tag()) >> 1; } void set_inline_size(size_t size) { ABSL_ASSERT(size <= kMaxInline); - set_tag(static_cast(size << 1)); + self()->set_tag(static_cast(size << 1)); } - CordRep* tree() const { return as_tree.rep; } - void set_tree(CordRep* rhs) { as_tree.rep = rhs; } + CordRep* tree() const { return self()->as_tree.rep; } + void set_tree(CordRep* rhs) { self()->as_tree.rep = rhs; } - cordz_info_t cordz_info() const { return as_tree.cordz_info; } - void set_cordz_info(cordz_info_t rhs) { as_tree.cordz_info = rhs; } + cordz_info_t cordz_info() const { return self()->as_tree.cordz_info; } + void set_cordz_info(cordz_info_t rhs) { self()->as_tree.cordz_info = rhs; } void make_tree(CordRep* tree) { - as_tree.rep = tree; - as_tree.cordz_info = kNullCordzInfo; + self()->as_tree.rep = tree; + self()->as_tree.cordz_info = kNullCordzInfo; } #ifdef ABSL_INTERNAL_CORD_HAVE_SANITIZER diff --git 
a/third_party/abseil-cpp/absl/strings/internal/cord_rep_btree.h b/third_party/abseil-cpp/absl/strings/internal/cord_rep_btree.h index be94b62e..ab259afe 100644 --- a/third_party/abseil-cpp/absl/strings/internal/cord_rep_btree.h +++ b/third_party/abseil-cpp/absl/strings/internal/cord_rep_btree.h @@ -684,14 +684,14 @@ inline CordRepBtree* CordRepBtree::CopyRaw(size_t new_length) const { // except `refcount` is trivially copyable, and the compiler does not // efficiently coalesce member-wise copy of these members. // See https://gcc.godbolt.org/z/qY8zsca6z - // # LINT.IfChange(copy_raw) + // LINT.IfChange(copy_raw) tree->length = new_length; uint8_t* dst = &tree->tag; const uint8_t* src = &tag; const ptrdiff_t offset = src - reinterpret_cast(this); memcpy(dst, src, sizeof(CordRepBtree) - static_cast(offset)); return tree; - // # LINT.ThenChange() + // LINT.ThenChange() } inline CordRepBtree* CordRepBtree::Copy() const { diff --git a/third_party/abseil-cpp/absl/strings/internal/has_absl_stringify.h b/third_party/abseil-cpp/absl/strings/internal/has_absl_stringify.h deleted file mode 100644 index 98f27dff..00000000 --- a/third_party/abseil-cpp/absl/strings/internal/has_absl_stringify.h +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2024 The Abseil Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ -#define ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ - -#include "absl/strings/has_absl_stringify.h" - -#include -#include - -#include "absl/base/config.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN - -namespace strings_internal { - -// This exists to fix a circular dependency problem with the GoogleTest release. -// GoogleTest referenced this internal file and this internal trait. Since -// simultaneous releases are not possible since once release must reference -// another, we will temporarily add this back. -// https://github.com/google/googletest/blob/v1.14.x/googletest/include/gtest/gtest-printers.h#L119 -// -// This file can be deleted after the next Abseil and GoogleTest release. -// -// https://github.com/google/googletest/pull/4368#issuecomment-1717699895 -// https://github.com/google/googletest/pull/4368#issuecomment-1717699895 -template -struct HasAbslStringify : std::false_type {}; - -template -struct HasAbslStringify< - T, std::enable_if_t(), - std::declval()))>::value>> : std::true_type {}; - -} // namespace strings_internal - -ABSL_NAMESPACE_END -} // namespace absl - -#endif // ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ diff --git a/third_party/abseil-cpp/absl/synchronization/BUILD.bazel b/third_party/abseil-cpp/absl/synchronization/BUILD.bazel index dafeba33..46a23f3b 100644 --- a/third_party/abseil-cpp/absl/synchronization/BUILD.bazel +++ b/third_party/abseil-cpp/absl/synchronization/BUILD.bazel @@ -141,6 +141,7 @@ cc_library( "//absl/base:dynamic_annotations", "//absl/base:malloc_internal", "//absl/base:raw_logging_internal", + "//absl/base:tracing_internal", "//absl/debugging:stacktrace", "//absl/debugging:symbolize", "//absl/time", @@ -177,6 +178,9 @@ cc_test( ], deps = [ ":synchronization", + "//absl/base:config", + "//absl/base:core_headers", + "//absl/base:tracing_internal", "//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", @@ -320,6 +324,9 @@ 
cc_test( tags = ["no_test_lexan"], deps = [ ":synchronization", + "//absl/base:config", + "//absl/base:core_headers", + "//absl/base:tracing_internal", "//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", diff --git a/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt b/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt index a0f64e5c..a7455747 100644 --- a/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt +++ b/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt @@ -112,7 +112,9 @@ absl_cc_library( absl::raw_logging_internal absl::stacktrace absl::symbolize + absl::tracing_internal absl::time + absl::tracing_internal Threads::Threads PUBLIC ) @@ -140,6 +142,7 @@ absl_cc_test( DEPS absl::synchronization absl::time + absl::tracing_internal GTest::gmock_main ) @@ -215,8 +218,12 @@ absl_cc_test( COPTS ${ABSL_TEST_COPTS} DEPS + absl::base + absl::config + absl::core_headers absl::synchronization absl::time + absl::tracing_internal GTest::gmock_main ) diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc b/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc index d2f82da3..a530baf4 100644 --- a/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc +++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc @@ -17,6 +17,7 @@ #include #include "absl/base/internal/raw_logging.h" +#include "absl/base/internal/tracing.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -40,6 +41,7 @@ bool BlockingCounter::DecrementCount() { ABSL_RAW_CHECK(count >= 0, "BlockingCounter::DecrementCount() called too many times"); if (count == 0) { + base_internal::TraceSignal(this, TraceObjectKind()); MutexLock l(&lock_); done_ = true; return true; @@ -48,19 +50,23 @@ bool BlockingCounter::DecrementCount() { } void BlockingCounter::Wait() { - MutexLock l(&this->lock_); + base_internal::TraceWait(this, TraceObjectKind()); + { + MutexLock l(&this->lock_); - // only one thread may call 
Wait(). To support more than one thread, - // implement a counter num_to_exit, like in the Barrier class. - ABSL_RAW_CHECK(num_waiting_ == 0, "multiple threads called Wait()"); - num_waiting_++; + // only one thread may call Wait(). To support more than one thread, + // implement a counter num_to_exit, like in the Barrier class. + ABSL_RAW_CHECK(num_waiting_ == 0, "multiple threads called Wait()"); + num_waiting_++; - this->lock_.Await(Condition(IsDone, &this->done_)); + this->lock_.Await(Condition(IsDone, &this->done_)); - // At this point, we know that all threads executing DecrementCount - // will not touch this object again. - // Therefore, the thread calling this method is free to delete the object - // after we return from this method. + // At this point, we know that all threads executing DecrementCount + // will not touch this object again. + // Therefore, the thread calling this method is free to delete the object + // after we return from this method. + } + base_internal::TraceContinue(this, TraceObjectKind()); } ABSL_NAMESPACE_END diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter.h b/third_party/abseil-cpp/absl/synchronization/blocking_counter.h index 1908fdb1..d0504a19 100644 --- a/third_party/abseil-cpp/absl/synchronization/blocking_counter.h +++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter.h @@ -22,6 +22,7 @@ #include +#include "absl/base/internal/tracing.h" #include "absl/base/thread_annotations.h" #include "absl/synchronization/mutex.h" @@ -89,6 +90,11 @@ class BlockingCounter { void Wait(); private: + // Convenience helper to reduce verbosity at call sites. 
+ static inline constexpr base_internal::ObjectKind TraceObjectKind() { + return base_internal::ObjectKind::kBlockingCounter; + } + Mutex lock_; std::atomic count_; int num_waiting_ ABSL_GUARDED_BY(lock_); diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc b/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc index 06885f57..0c42b56a 100644 --- a/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc +++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc @@ -15,9 +15,13 @@ #include "absl/synchronization/blocking_counter.h" #include // NOLINT(build/c++11) +#include #include #include "gtest/gtest.h" +#include "absl/base/attributes.h" +#include "absl/base/config.h" +#include "absl/base/internal/tracing.h" #include "absl/time/clock.h" #include "absl/time/time.h" @@ -76,5 +80,67 @@ TEST(BlockingCounterTest, WaitNegativeInitialCount) { #endif } // namespace + +#if ABSL_HAVE_ATTRIBUTE_WEAK + +namespace base_internal { + +namespace { + +using TraceRecord = std::tuple; + +thread_local TraceRecord tls_signal; +thread_local TraceRecord tls_wait; +thread_local TraceRecord tls_continue; + +} // namespace + +// Strong extern "C" implementation. 
+extern "C" { + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)(const void* object, + ObjectKind kind) { + tls_wait = {object, kind}; +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)(const void* object, + ObjectKind kind) { + tls_continue = {object, kind}; +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)(const void* object, + ObjectKind kind) { + tls_signal = {object, kind}; +} + +} // extern "C" + +TEST(BlockingCounterTest, TracesSignal) { + BlockingCounter counter(2); + + tls_signal = {}; + counter.DecrementCount(); + EXPECT_EQ(tls_signal, TraceRecord(nullptr, ObjectKind::kUnknown)); + + tls_signal = {}; + counter.DecrementCount(); + EXPECT_EQ(tls_signal, TraceRecord(&counter, ObjectKind::kBlockingCounter)); +} + +TEST(BlockingCounterTest, TracesWaitContinue) { + BlockingCounter counter(1); + counter.DecrementCount(); + + tls_wait = {}; + tls_continue = {}; + counter.Wait(); + EXPECT_EQ(tls_wait, TraceRecord(&counter, ObjectKind::kBlockingCounter)); + EXPECT_EQ(tls_continue, TraceRecord(&counter, ObjectKind::kBlockingCounter)); +} + +} // namespace base_internal + +#endif // ABSL_HAVE_ATTRIBUTE_WEAK + ABSL_NAMESPACE_END } // namespace absl diff --git a/third_party/abseil-cpp/absl/synchronization/notification.cc b/third_party/abseil-cpp/absl/synchronization/notification.cc index 165ba669..a5853ab3 100644 --- a/third_party/abseil-cpp/absl/synchronization/notification.cc +++ b/third_party/abseil-cpp/absl/synchronization/notification.cc @@ -17,6 +17,7 @@ #include #include "absl/base/internal/raw_logging.h" +#include "absl/base/internal/tracing.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" @@ -24,6 +25,7 @@ namespace absl { ABSL_NAMESPACE_BEGIN void Notification::Notify() { + base_internal::TraceSignal(this, TraceObjectKind()); MutexLock l(&this->mutex_); #ifndef NDEBUG @@ -45,31 +47,37 @@ Notification::~Notification() { } void Notification::WaitForNotification() const { + base_internal::TraceWait(this, 
TraceObjectKind()); if (!HasBeenNotifiedInternal(&this->notified_yet_)) { - this->mutex_.LockWhen(Condition(&HasBeenNotifiedInternal, - &this->notified_yet_)); + this->mutex_.LockWhen( + Condition(&HasBeenNotifiedInternal, &this->notified_yet_)); this->mutex_.Unlock(); } + base_internal::TraceContinue(this, TraceObjectKind()); } bool Notification::WaitForNotificationWithTimeout( absl::Duration timeout) const { + base_internal::TraceWait(this, TraceObjectKind()); bool notified = HasBeenNotifiedInternal(&this->notified_yet_); if (!notified) { notified = this->mutex_.LockWhenWithTimeout( Condition(&HasBeenNotifiedInternal, &this->notified_yet_), timeout); this->mutex_.Unlock(); } + base_internal::TraceContinue(notified ? this : nullptr, TraceObjectKind()); return notified; } bool Notification::WaitForNotificationWithDeadline(absl::Time deadline) const { + base_internal::TraceWait(this, TraceObjectKind()); bool notified = HasBeenNotifiedInternal(&this->notified_yet_); if (!notified) { notified = this->mutex_.LockWhenWithDeadline( Condition(&HasBeenNotifiedInternal, &this->notified_yet_), deadline); this->mutex_.Unlock(); } + base_internal::TraceContinue(notified ? this : nullptr, TraceObjectKind()); return notified; } diff --git a/third_party/abseil-cpp/absl/synchronization/notification.h b/third_party/abseil-cpp/absl/synchronization/notification.h index 8986d9a4..78cdf296 100644 --- a/third_party/abseil-cpp/absl/synchronization/notification.h +++ b/third_party/abseil-cpp/absl/synchronization/notification.h @@ -53,6 +53,7 @@ #include #include "absl/base/attributes.h" +#include "absl/base/internal/tracing.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" @@ -75,7 +76,11 @@ class Notification { // // Returns the value of the notification's internal "notified" state. 
ABSL_MUST_USE_RESULT bool HasBeenNotified() const { - return HasBeenNotifiedInternal(&this->notified_yet_); + if (HasBeenNotifiedInternal(&this->notified_yet_)) { + base_internal::TraceObserved(this, TraceObjectKind()); + return true; + } + return false; } // Notification::WaitForNotification() @@ -108,6 +113,11 @@ class Notification { void Notify(); private: + // Convenience helper to reduce verbosity at call sites. + static inline constexpr base_internal::ObjectKind TraceObjectKind() { + return base_internal::ObjectKind::kNotification; + } + static inline bool HasBeenNotifiedInternal( const std::atomic* notified_yet) { return notified_yet->load(std::memory_order_acquire); diff --git a/third_party/abseil-cpp/absl/synchronization/notification_test.cc b/third_party/abseil-cpp/absl/synchronization/notification_test.cc index 49ce61a5..027bc05b 100644 --- a/third_party/abseil-cpp/absl/synchronization/notification_test.cc +++ b/third_party/abseil-cpp/absl/synchronization/notification_test.cc @@ -15,10 +15,15 @@ #include "absl/synchronization/notification.h" #include // NOLINT(build/c++11) +#include #include #include "gtest/gtest.h" +#include "absl/base/attributes.h" +#include "absl/base/config.h" +#include "absl/base/internal/tracing.h" #include "absl/synchronization/mutex.h" +#include "absl/time/time.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -129,5 +134,93 @@ TEST(NotificationTest, SanityTest) { BasicTests(true, &local_notification2); } +#if ABSL_HAVE_ATTRIBUTE_WEAK + +namespace base_internal { + +namespace { + +using TraceRecord = std::tuple; + +thread_local TraceRecord tls_signal; +thread_local TraceRecord tls_wait; +thread_local TraceRecord tls_continue; +thread_local TraceRecord tls_observed; + +} // namespace + +// Strong extern "C" implementation. 
+extern "C" { + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceWait)(const void* object, + ObjectKind kind) { + tls_wait = {object, kind}; +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceContinue)(const void* object, + ObjectKind kind) { + tls_continue = {object, kind}; +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceSignal)(const void* object, + ObjectKind kind) { + tls_signal = {object, kind}; +} + +void ABSL_INTERNAL_C_SYMBOL(AbslInternalTraceObserved)(const void* object, + ObjectKind kind) { + tls_observed = {object, kind}; +} + +} // extern "C" + +TEST(NotificationTest, TracesNotify) { + Notification n; + tls_signal = {}; + n.Notify(); + EXPECT_EQ(tls_signal, TraceRecord(&n, ObjectKind::kNotification)); +} + +TEST(NotificationTest, TracesWaitForNotification) { + Notification n; + n.Notify(); + tls_wait = tls_continue = {}; + n.WaitForNotification(); + EXPECT_EQ(tls_wait, TraceRecord(&n, ObjectKind::kNotification)); + EXPECT_EQ(tls_continue, TraceRecord(&n, ObjectKind::kNotification)); +} + +TEST(NotificationTest, TracesWaitForNotificationWithTimeout) { + Notification n; + + tls_wait = tls_continue = {}; + n.WaitForNotificationWithTimeout(absl::Milliseconds(1)); + EXPECT_EQ(tls_wait, TraceRecord(&n, ObjectKind::kNotification)); + EXPECT_EQ(tls_continue, TraceRecord(nullptr, ObjectKind::kNotification)); + + n.Notify(); + tls_wait = tls_continue = {}; + n.WaitForNotificationWithTimeout(absl::Milliseconds(1)); + EXPECT_EQ(tls_wait, TraceRecord(&n, ObjectKind::kNotification)); + EXPECT_EQ(tls_continue, TraceRecord(&n, ObjectKind::kNotification)); +} + +TEST(NotificationTest, TracesHasBeenNotified) { + Notification n; + + tls_observed = {}; + ASSERT_FALSE(n.HasBeenNotified()); + EXPECT_EQ(tls_observed, TraceRecord(nullptr, ObjectKind::kUnknown)); + + n.Notify(); + tls_observed = {}; + ASSERT_TRUE(n.HasBeenNotified()); + EXPECT_EQ(tls_observed, TraceRecord(&n, ObjectKind::kNotification)); +} + +} // namespace base_internal + +#endif // ABSL_HAVE_ATTRIBUTE_WEAK + 
ABSL_NAMESPACE_END } // namespace absl diff --git a/third_party/abseil-cpp/ci/cmake_common.sh b/third_party/abseil-cpp/ci/cmake_common.sh index 784b3815..c8a5b857 100644 --- a/third_party/abseil-cpp/ci/cmake_common.sh +++ b/third_party/abseil-cpp/ci/cmake_common.sh @@ -14,16 +14,6 @@ # The commit of GoogleTest to be used in the CMake tests in this directory. # Keep this in sync with the commit in the WORKSPACE file. -# TODO(dmauro): After the next GoogleTest release, use the stable file required -# by Bzlmod. This means downloading a copy of the file and reuploading it to -# avoid changing checksums if the compression is changed by GitHub. It also -# means stop referring to it as a commit and instead use the uploaded filename. -readonly ABSL_GOOGLETEST_COMMIT="f8d7d77c06936315286eb55f8de22cd23c188571" +readonly ABSL_GOOGLETEST_VERSION="1.15.2" -# Avoid depending on GitHub by looking for a cached copy of the commit first. -if [[ -r "${KOKORO_GFILE_DIR:-}/distdir/${ABSL_GOOGLETEST_COMMIT}.zip" ]]; then - DOCKER_EXTRA_ARGS="--mount type=bind,source=${KOKORO_GFILE_DIR}/distdir,target=/distdir,readonly ${DOCKER_EXTRA_ARGS:-}" - ABSL_GOOGLETEST_DOWNLOAD_URL="file:///distdir/${ABSL_GOOGLETEST_COMMIT}.zip" -else - ABSL_GOOGLETEST_DOWNLOAD_URL="https://github.com/google/googletest/archive/${ABSL_GOOGLETEST_COMMIT}.zip" -fi +readonly ABSL_GOOGLETEST_DOWNLOAD_URL="https://github.com/google/googletest/releases/download/v${ABSL_GOOGLETEST_VERSION}/googletest-${ABSL_GOOGLETEST_VERSION}.tar.gz" diff --git a/third_party/abseil-cpp/ci/cmake_install_test.sh b/third_party/abseil-cpp/ci/cmake_install_test.sh index ab3b86f0..871490ff 100755 --- a/third_party/abseil-cpp/ci/cmake_install_test.sh +++ b/third_party/abseil-cpp/ci/cmake_install_test.sh @@ -48,7 +48,7 @@ for link_type in ${LINK_TYPE}; do --tmpfs=/abseil-cpp:exec \ --workdir=/abseil-cpp \ --cap-add=SYS_PTRACE \ - -e "ABSL_GOOGLETEST_COMMIT=${ABSL_GOOGLETEST_COMMIT}" \ + -e "ABSL_GOOGLETEST_VERSION=${ABSL_GOOGLETEST_VERSION}" \ 
-e "ABSL_GOOGLETEST_DOWNLOAD_URL=${ABSL_GOOGLETEST_DOWNLOAD_URL}" \ -e "LINK_TYPE=${link_type}" \ --rm \ diff --git a/third_party/abseil-cpp/ci/linux_docker_containers.sh b/third_party/abseil-cpp/ci/linux_docker_containers.sh index bd8d8ef3..fefef928 100644 --- a/third_party/abseil-cpp/ci/linux_docker_containers.sh +++ b/third_party/abseil-cpp/ci/linux_docker_containers.sh @@ -18,5 +18,5 @@ readonly LINUX_ALPINE_CONTAINER="gcr.io/google.com/absl-177019/alpine:20230612" readonly LINUX_CLANG_LATEST_CONTAINER="gcr.io/google.com/absl-177019/linux_hybrid-latest:20240523" readonly LINUX_ARM_CLANG_LATEST_CONTAINER="gcr.io/google.com/absl-177019/linux_arm_hybrid-latest:20231219" -readonly LINUX_GCC_LATEST_CONTAINER="gcr.io/google.com/absl-177019/linux_hybrid-latest:20240523_rc1" -readonly LINUX_GCC_FLOOR_CONTAINER="gcr.io/google.com/absl-177019/linux_gcc-floor:20230120" +readonly LINUX_GCC_LATEST_CONTAINER="gcr.io/google.com/absl-177019/linux_hybrid-latest:20240523" +readonly LINUX_GCC_FLOOR_CONTAINER="gcr.io/google.com/absl-177019/linux_gcc-floor:20240717" diff --git a/third_party/abseil-cpp/ci/linux_gcc-floor_libstdcxx_bazel.sh b/third_party/abseil-cpp/ci/linux_gcc-floor_libstdcxx_bazel.sh index 5bd1dbf8..b2d8c1da 100755 --- a/third_party/abseil-cpp/ci/linux_gcc-floor_libstdcxx_bazel.sh +++ b/third_party/abseil-cpp/ci/linux_gcc-floor_libstdcxx_bazel.sh @@ -59,9 +59,6 @@ if [[ ${KOKORO_GFILE_DIR:-} ]] && [[ -d "${KOKORO_GFILE_DIR}/distdir" ]]; then BAZEL_EXTRA_ARGS="--distdir=/distdir ${BAZEL_EXTRA_ARGS:-}" fi -# TODO(absl-team): This currently uses Bazel 5. When upgrading to a version -# of Bazel that supports Bzlmod, add --enable_bzlmod=false to keep test -# coverage for the old WORKSPACE dependency management. 
for std in ${STD}; do for compilation_mode in ${COMPILATION_MODE}; do for exceptions_mode in ${EXCEPTIONS_MODE}; do @@ -82,6 +79,7 @@ for std in ${STD}; do --copt=-Werror \ --define="absl=1" \ --distdir="/bazel-distdir" \ + --enable_bzlmod=false \ --features=external_include_paths \ --keep_going \ --show_timestamps \ diff --git a/third_party/abseil-cpp/ci/macos_xcode_cmake.sh b/third_party/abseil-cpp/ci/macos_xcode_cmake.sh index c8780384..eba2fb5e 100755 --- a/third_party/abseil-cpp/ci/macos_xcode_cmake.sh +++ b/third_party/abseil-cpp/ci/macos_xcode_cmake.sh @@ -23,11 +23,6 @@ ABSEIL_ROOT=$(realpath ${ABSEIL_ROOT}) source "${ABSEIL_ROOT}/ci/cmake_common.sh" -# The MacOS build doesn't run in a docker container, so we have to override ABSL_GOOGLETEST_DOWNLOAD_URL. -if [[ -r "${KOKORO_GFILE_DIR}/distdir/${ABSL_GOOGLETEST_COMMIT}.zip" ]]; then - ABSL_GOOGLETEST_DOWNLOAD_URL="file://${KOKORO_GFILE_DIR}/distdir/${ABSL_GOOGLETEST_COMMIT}.zip" -fi - if [[ -z ${ABSL_CMAKE_BUILD_TYPES:-} ]]; then ABSL_CMAKE_BUILD_TYPES="Debug" fi diff --git a/third_party/abseil-cpp/ci/windows_msvc_cmake.bat b/third_party/abseil-cpp/ci/windows_msvc_cmake.bat index 8b431d77..c9aee785 100755 --- a/third_party/abseil-cpp/ci/windows_msvc_cmake.bat +++ b/third_party/abseil-cpp/ci/windows_msvc_cmake.bat @@ -14,19 +14,10 @@ SETLOCAL ENABLEDELAYEDEXPANSION -:: The commit of GoogleTest to be used in the CMake tests in this directory. -:: Keep this in sync with the commit in the WORKSPACE file. -:: TODO(dmauro): After the next GoogleTest release, use the stable file required -:: by Bzlmod. This means downloading a copy of the file and reuploading it to -:: avoid changing checksums if the compression is changed by GitHub. It also -:: means stop referring to it as a commit and instead use the uploaded filename. 
-SET ABSL_GOOGLETEST_COMMIT=f8d7d77c06936315286eb55f8de22cd23c188571 - -IF EXIST %KOKORO_GFILE_DIR%\distdir\%ABSL_GOOGLETEST_COMMIT%.zip ( - SET ABSL_GOOGLETEST_DOWNLOAD_URL=file://%KOKORO_GFILE_DIR%\distdir\%ABSL_GOOGLETEST_COMMIT%.zip -) ELSE ( - SET ABSL_GOOGLETEST_DOWNLOAD_URL=https://github.com/google/googletest/archive/%ABSL_GOOGLETEST_COMMIT%.zip -) +:: The version of GoogleTest to be used in the CMake tests in this directory. +:: Keep this in sync with the version in the WORKSPACE file. +SET ABSL_GOOGLETEST_VERSION=1.15.2 +SET ABSL_GOOGLETEST_DOWNLOAD_URL=https://github.com/google/googletest/releases/download/v%ABSL_GOOGLETEST_VERSION%/googletest-%ABSL_GOOGLETEST_VERSION%.tar.gz :: Replace '\' with '/' in Windows paths for CMake. :: Note that this cannot go inside the IF block above, because BAT files are weird. diff --git a/third_party/bazel-skylib/rules/BUILD b/third_party/bazel-skylib/rules/BUILD index 293fd035..ee04c770 100644 --- a/third_party/bazel-skylib/rules/BUILD +++ b/third_party/bazel-skylib/rules/BUILD @@ -68,6 +68,7 @@ filegroup( testonly = True, srcs = [ "BUILD", + "//rules/private:test_deps", ] + glob(["*.bzl"]), visibility = ["//:__subpackages__"], # Needs skylib's root BUILD file for default_applicable_licenses ) diff --git a/third_party/bazel-skylib/rules/private/BUILD b/third_party/bazel-skylib/rules/private/BUILD index a876c7c0..44f1509c 100644 --- a/third_party/bazel-skylib/rules/private/BUILD +++ b/third_party/bazel-skylib/rules/private/BUILD @@ -49,6 +49,17 @@ bzl_library( srcs = ["maprule_util.bzl"], ) +filegroup( + name = "test_deps", + testonly = True, + srcs = [ + "BUILD", + ] + glob(["*.bzl"]), + visibility = [ + "//rules:__pkg__", + ], +) + # The files needed for distribution filegroup( name = "distribution", diff --git a/third_party/fmt/.github/workflows/lint.yml b/third_party/fmt/.github/workflows/lint.yml index 0792356f..51a62f46 100644 --- a/third_party/fmt/.github/workflows/lint.yml +++ 
b/third_party/fmt/.github/workflows/lint.yml @@ -8,7 +8,6 @@ on: permissions: contents: read - pull-requests: write jobs: format_code: @@ -22,35 +21,6 @@ jobs: clangformat: 17.0.5 - name: Run clang-format - id: clang_format run: | find include src -name '*.h' -o -name '*.cc' | xargs clang-format -i -style=file -fallback-style=none - git diff | tee fmt.patch - if [ -s fmt.patch ]; then - exit 1 - fi - - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 - if: failure() && steps.clang_format.outcome == 'failure' - with: - github-token: ${{ secrets.KEY }} - script: | - const fs = require('fs'); - const patch = fs.readFileSync('fmt.patch', { encoding: 'utf8' }); - const comment = `clang-format 17.0.5 found issues in the formatting in your code: -
- - View the diff from clang-format: - - - \`\`\`diff - ${patch} - \`\`\` - -
- `; - await github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: comment - }); + git diff --exit-code diff --git a/third_party/fmt/.github/workflows/scorecard.yml b/third_party/fmt/.github/workflows/scorecard.yml index 15582407..8640f697 100644 --- a/third_party/fmt/.github/workflows/scorecard.yml +++ b/third_party/fmt/.github/workflows/scorecard.yml @@ -34,7 +34,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3 + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif results_format: sarif diff --git a/third_party/fmt/.github/workflows/windows.yml b/third_party/fmt/.github/workflows/windows.yml index 014396b9..c9d56e09 100644 --- a/third_party/fmt/.github/workflows/windows.yml +++ b/third_party/fmt/.github/workflows/windows.yml @@ -83,7 +83,7 @@ jobs: - name: Set timezone run: tzutil /s "Ekaterinburg Standard Time" shell: cmd - - uses: msys2/setup-msys2@d0e80f58dffbc64f6a3a1f43527d469b4fc7b6c8 # v2.23.0 + - uses: msys2/setup-msys2@5df0ca6cbf14efcd08f8d5bd5e049a3cc8e07fd2 # v2.24.0 with: release: false msystem: ${{matrix.sys}} diff --git a/third_party/fmt/ChangeLog.md b/third_party/fmt/ChangeLog.md index d3b7585f..6ba567d1 100644 --- a/third_party/fmt/ChangeLog.md +++ b/third_party/fmt/ChangeLog.md @@ -1,11 +1,30 @@ -# 11.0.2 - TBD +# 11.0.2 - 2024-07-20 - Fixed compatibility with non-POSIX systems - (https://github.com/fmtlib/fmt/issues/4054). + (https://github.com/fmtlib/fmt/issues/4054, + https://github.com/fmtlib/fmt/issues/4060). + +- Fixed performance regressions when using `std::back_insert_iterator` with + `fmt::format_to` (https://github.com/fmtlib/fmt/issues/4070). + +- Fixed handling of `std::generator` and move-only iterators + (https://github.com/fmtlib/fmt/issues/4053, + https://github.com/fmtlib/fmt/pull/4057). 
Thanks @Arghnews. + +- Made `formatter::parse` work with types convertible to + `std::string_view` (https://github.com/fmtlib/fmt/issues/4036, + https://github.com/fmtlib/fmt/pull/4055). Thanks @Arghnews. + +- Made `volatile void*` formattable + (https://github.com/fmtlib/fmt/issues/4049, + https://github.com/fmtlib/fmt/pull/4056). Thanks @Arghnews. - Made `Glib::ustring` not be confused with `std::string` (https://github.com/fmtlib/fmt/issues/4052). +- Made `fmt::context` iterator compatible with STL algorithms that rely on + iterator category (https://github.com/fmtlib/fmt/issues/4079). + # 11.0.1 - 2024-07-05 - Fixed version number in the inline namespace @@ -21,6 +40,9 @@ (https://github.com/fmtlib/fmt/pull/4034, https://github.com/fmtlib/fmt/pull/4050). Thanks @tesch1 and @phprus. +- Fixed ADL issues in `fmt::printf` when using C++20 + (https://github.com/fmtlib/fmt/pull/4042). Thanks @toge. + - Removed a redundant check in the formatter for `std::expected` (https://github.com/fmtlib/fmt/pull/4040). Thanks @phprus. diff --git a/third_party/fmt/include/fmt/base.h b/third_party/fmt/include/fmt/base.h index dd4b2210..ab7a238f 100644 --- a/third_party/fmt/include/fmt/base.h +++ b/third_party/fmt/include/fmt/base.h @@ -23,7 +23,7 @@ #endif // The fmt library version in the form major * 10000 + minor * 100 + patch. -#define FMT_VERSION 110001 +#define FMT_VERSION 110002 // Detect compiler versions. #if defined(__clang__) && !defined(__ibmxl__) @@ -494,7 +494,8 @@ struct is_back_insert_iterator< // Extracts a reference to the container from *insert_iterator. 
template -inline auto get_container(OutputIt it) -> typename OutputIt::container_type& { +inline FMT_CONSTEXPR20 auto get_container(OutputIt it) -> + typename OutputIt::container_type& { struct accessor : OutputIt { accessor(OutputIt base) : OutputIt(base) {} using OutputIt::container; @@ -901,7 +902,7 @@ template class buffer { FMT_CONSTEXPR auto data() const noexcept -> const T* { return ptr_; } /// Clears this buffer. - void clear() { size_ = 0; } + FMT_CONSTEXPR void clear() { size_ = 0; } // Tries resizing the buffer to contain `count` elements. If T is a POD type // the new elements may not be initialized. @@ -924,7 +925,15 @@ template class buffer { } /// Appends data to the end of the buffer. - template void append(const U* begin, const U* end) { + template +// Workaround for Visual Studio 2019 to fix error C2893: Failed to specialize +// function template 'void fmt::v11::detail::buffer::append(const U *,const +// U *)' +#if !FMT_MSC_VERSION || FMT_MSC_VERSION >= 1930 + FMT_CONSTEXPR20 +#endif + void + append(const U* begin, const U* end) { while (begin != end) { auto count = to_unsigned(end - begin); try_reserve(size_ + count); @@ -1086,7 +1095,7 @@ class iterator_buffer< explicit iterator_buffer(OutputIt out, size_t = 0) : iterator_buffer(get_container(out)) {} - auto out() -> OutputIt { return back_inserter(container_); } + auto out() -> OutputIt { return OutputIt(container_); } }; // A buffer that counts the number of code units written discarding the output. 
@@ -1103,9 +1112,11 @@ template class counting_buffer : public buffer { } public: - counting_buffer() : buffer(grow, data_, 0, buffer_size) {} + FMT_CONSTEXPR counting_buffer() : buffer(grow, data_, 0, buffer_size) {} - auto count() -> size_t { return count_ + this->size(); } + constexpr auto count() const noexcept -> size_t { + return count_ + this->size(); + } }; } // namespace detail @@ -1155,7 +1166,8 @@ template class basic_appender { private: detail::buffer* buffer_; - friend auto get_container(basic_appender app) -> detail::buffer& { + friend FMT_CONSTEXPR20 auto get_container(basic_appender app) + -> detail::buffer& { return *app.buffer_; } @@ -1170,13 +1182,13 @@ template class basic_appender { FMT_CONSTEXPR basic_appender(detail::buffer& buf) : buffer_(&buf) {} - auto operator=(T c) -> basic_appender& { + FMT_CONSTEXPR20 auto operator=(T c) -> basic_appender& { buffer_->push_back(c); return *this; } - auto operator*() -> basic_appender& { return *this; } - auto operator++() -> basic_appender& { return *this; } - auto operator++(int) -> basic_appender { return *this; } + FMT_CONSTEXPR20 auto operator*() -> basic_appender& { return *this; } + FMT_CONSTEXPR20 auto operator++() -> basic_appender& { return *this; } + FMT_CONSTEXPR20 auto operator++(int) -> basic_appender { return *this; } }; using appender = basic_appender; @@ -1185,24 +1197,11 @@ namespace detail { template struct is_back_insert_iterator> : std::true_type {}; -template -struct locking : std::true_type {}; -template -struct locking>::nonlocking>> - : std::false_type {}; - -template FMT_CONSTEXPR inline auto is_locking() -> bool { - return locking::value; -} -template -FMT_CONSTEXPR inline auto is_locking() -> bool { - return locking::value || is_locking(); -} - // An optimized version of std::copy with the output value type (T). 
template ::value)> -auto copy(InputIt begin, InputIt end, OutputIt out) -> OutputIt { +FMT_CONSTEXPR20 auto copy(InputIt begin, InputIt end, OutputIt out) + -> OutputIt { get_container(out).append(begin, end); return out; } @@ -1869,7 +1868,7 @@ template class basic_format_args { FMT_CONSTEXPR auto type(int index) const -> detail::type { int shift = index * detail::packed_arg_bits; - unsigned int mask = (1 << detail::packed_arg_bits) - 1; + unsigned mask = (1 << detail::packed_arg_bits) - 1; return static_cast((desc_ >> shift) & mask); } @@ -1909,8 +1908,7 @@ template class basic_format_args { } if (static_cast(id) >= detail::max_packed_args) return arg; arg.type_ = type(id); - if (arg.type_ == detail::type::none_type) return arg; - arg.value_ = values_[id]; + if (arg.type_ != detail::type::none_type) arg.value_ = values_[id]; return arg; } @@ -2053,76 +2051,30 @@ FMT_END_EXPORT // between clang and gcc on ARM (#1919). FMT_EXPORT using format_args = basic_format_args; -// We cannot use enum classes as bit fields because of a gcc bug, so we put them -// in namespaces instead (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61414). -// Additionally, if an underlying type is specified, older gcc incorrectly warns -// that the type is too small. Both bugs are fixed in gcc 9.3. 
-#if FMT_GCC_VERSION && FMT_GCC_VERSION < 903 -# define FMT_ENUM_UNDERLYING_TYPE(type) -#else -# define FMT_ENUM_UNDERLYING_TYPE(type) : type -#endif -namespace align { -enum type FMT_ENUM_UNDERLYING_TYPE(unsigned char){none, left, right, center, - numeric}; +namespace detail { + +template +struct locking : bool_constant::value == + type::custom_type> {}; +template +struct locking>::nonlocking>> + : std::false_type {}; + +template FMT_CONSTEXPR inline auto is_locking() -> bool { + return locking::value; } -using align_t = align::type; -namespace sign { -enum type FMT_ENUM_UNDERLYING_TYPE(unsigned char){none, minus, plus, space}; +template +FMT_CONSTEXPR inline auto is_locking() -> bool { + return locking::value || is_locking(); } -using sign_t = sign::type; - -namespace detail { template using unsigned_char = typename conditional_t::value, std::make_unsigned, type_identity>::type; -// Character (code unit) type is erased to prevent template bloat. -struct fill_t { - private: - enum { max_size = 4 }; - char data_[max_size] = {' '}; - unsigned char size_ = 1; - - public: - template - FMT_CONSTEXPR void operator=(basic_string_view s) { - auto size = s.size(); - size_ = static_cast(size); - if (size == 1) { - unsigned uchar = static_cast>(s[0]); - data_[0] = static_cast(uchar); - data_[1] = static_cast(uchar >> 8); - return; - } - FMT_ASSERT(size <= max_size, "invalid fill"); - for (size_t i = 0; i < size; ++i) data_[i] = static_cast(s[i]); - } - - FMT_CONSTEXPR void operator=(char c) { - data_[0] = c; - size_ = 1; - } - - constexpr auto size() const -> size_t { return size_; } - - template constexpr auto get() const -> Char { - using uchar = unsigned char; - return static_cast(static_cast(data_[0]) | - (static_cast(data_[1]) << 8)); - } +enum class arg_id_kind { none, index, name }; - template ::value)> - constexpr auto data() const -> const Char* { - return data_; - } - template ::value)> - constexpr auto data() const -> const Char* { - return nullptr; - } -}; } // 
namespace detail enum class presentation_type : unsigned char { @@ -2148,56 +2100,170 @@ enum class presentation_type : unsigned char { hexfloat // 'a' or 'A' }; +enum class align { none, left, right, center, numeric }; +enum class sign { none, minus, plus, space }; + +// Basic format specifiers for built-in and string types. +class basic_specs { + private: + // Data is arranged as follows: + // + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |type |align| w | p | s |u|#|L| f | unused | + // +-----+-----+---+---+---+-+-+-+-----+---------------------------+ + // + // w - dynamic width info + // p - dynamic precision info + // s - sign + // u - uppercase (e.g. 'X' for 'x') + // # - alternate form ('#') + // L - localized + // f - fill size + // + // Bitfields are not used because of compiler bugs such as gcc bug 61414. + enum : unsigned { + type_mask = 0x00007, + align_mask = 0x00038, + width_mask = 0x000C0, + precision_mask = 0x00300, + sign_mask = 0x00C00, + uppercase_mask = 0x01000, + alternate_mask = 0x02000, + localized_mask = 0x04000, + fill_size_mask = 0x38000, + + align_shift = 3, + width_shift = 6, + precision_shift = 8, + sign_shift = 10, + fill_size_shift = 15, + + max_fill_size = 4 + }; + + unsigned long data_ = 1 << fill_size_shift; + + // Character (code unit) type is erased to prevent template bloat. 
+ char fill_data_[max_fill_size] = {' '}; + + FMT_CONSTEXPR void set_fill_size(size_t size) { + data_ = (data_ & ~fill_size_mask) | (size << fill_size_shift); + } + + public: + constexpr auto type() const -> presentation_type { + return static_cast(data_ & type_mask); + } + FMT_CONSTEXPR void set_type(presentation_type t) { + data_ = (data_ & ~type_mask) | static_cast(t); + } + + constexpr auto align() const -> align { + return static_cast((data_ & align_mask) >> align_shift); + } + FMT_CONSTEXPR void set_align(fmt::align a) { + data_ = (data_ & ~align_mask) | (static_cast(a) << align_shift); + } + + constexpr auto dynamic_width() const -> detail::arg_id_kind { + return static_cast((data_ & width_mask) >> + width_shift); + } + FMT_CONSTEXPR void set_dynamic_width(detail::arg_id_kind w) { + data_ = (data_ & ~width_mask) | (static_cast(w) << width_shift); + } + + FMT_CONSTEXPR auto dynamic_precision() const -> detail::arg_id_kind { + return static_cast((data_ & precision_mask) >> + precision_shift); + } + FMT_CONSTEXPR void set_dynamic_precision(detail::arg_id_kind p) { + data_ = (data_ & ~precision_mask) | + (static_cast(p) << precision_shift); + } + + constexpr bool dynamic() const { + return (data_ & (width_mask | precision_mask)) != 0; + } + + constexpr auto sign() const -> sign { + return static_cast((data_ & sign_mask) >> sign_shift); + } + FMT_CONSTEXPR void set_sign(fmt::sign s) { + data_ = (data_ & ~sign_mask) | (static_cast(s) << sign_shift); + } + + constexpr auto upper() const -> bool { return (data_ & uppercase_mask) != 0; } + FMT_CONSTEXPR void set_upper() { data_ |= uppercase_mask; } + + constexpr auto alt() const -> bool { return (data_ & alternate_mask) != 0; } + FMT_CONSTEXPR void set_alt() { data_ |= alternate_mask; } + FMT_CONSTEXPR void clear_alt() { data_ &= ~alternate_mask; } + + constexpr auto localized() const -> bool { + return (data_ & localized_mask) != 0; + } + FMT_CONSTEXPR void set_localized() { data_ |= localized_mask; } + + constexpr 
auto fill_size() const -> size_t { + return (data_ & fill_size_mask) >> fill_size_shift; + } + + template ::value)> + constexpr auto fill() const -> const Char* { + return fill_data_; + } + template ::value)> + constexpr auto fill() const -> const Char* { + return nullptr; + } + + template constexpr auto fill_unit() const -> Char { + using uchar = unsigned char; + return static_cast(static_cast(fill_data_[0]) | + (static_cast(fill_data_[1]) << 8)); + } + + FMT_CONSTEXPR void set_fill(char c) { + fill_data_[0] = c; + set_fill_size(1); + } + + template + FMT_CONSTEXPR void set_fill(basic_string_view s) { + auto size = s.size(); + set_fill_size(size); + if (size == 1) { + unsigned uchar = static_cast>(s[0]); + fill_data_[0] = static_cast(uchar); + fill_data_[1] = static_cast(uchar >> 8); + return; + } + FMT_ASSERT(size <= max_fill_size, "invalid fill"); + for (size_t i = 0; i < size; ++i) + fill_data_[i & 3] = static_cast(s[i]); + } +}; + // Format specifiers for built-in and string types. -struct format_specs { +struct format_specs : basic_specs { int width; int precision; - presentation_type type; - align_t align : 4; - sign_t sign : 3; - bool upper : 1; // An uppercase version e.g. 'X' for 'x'. - bool alt : 1; // Alternate form ('#'). - bool localized : 1; - detail::fill_t fill; - - constexpr format_specs() - : width(0), - precision(-1), - type(presentation_type::none), - align(align::none), - sign(sign::none), - upper(false), - alt(false), - localized(false) {} + + constexpr format_specs() : width(0), precision(-1) {} }; namespace detail { -enum class arg_id_kind { none, index, name }; - // An argument reference. 
-template struct arg_ref { - FMT_CONSTEXPR arg_ref() : kind(arg_id_kind::none), val() {} - - FMT_CONSTEXPR explicit arg_ref(int index) - : kind(arg_id_kind::index), val(index) {} - FMT_CONSTEXPR explicit arg_ref(basic_string_view name) - : kind(arg_id_kind::name), val(name) {} - - FMT_CONSTEXPR auto operator=(int idx) -> arg_ref& { - kind = arg_id_kind::index; - val.index = idx; - return *this; - } - - arg_id_kind kind; - union value { - FMT_CONSTEXPR value(int idx = 0) : index(idx) {} - FMT_CONSTEXPR value(basic_string_view n) : name(n) {} +template union arg_ref { + FMT_CONSTEXPR arg_ref(int idx = 0) : index(idx) {} + FMT_CONSTEXPR arg_ref(basic_string_view n) : name(n) {} - int index; - basic_string_view name; - } val; + int index; + basic_string_view name; }; // Format specifiers with width and precision resolved at formatting rather @@ -2264,7 +2330,7 @@ FMT_CONSTEXPR auto parse_nonnegative_int(const Char*& begin, const Char* end, : error_value; } -FMT_CONSTEXPR inline auto parse_align(char c) -> align_t { +FMT_CONSTEXPR inline auto parse_align(char c) -> align { switch (c) { case '<': return align::left; @@ -2281,8 +2347,8 @@ template constexpr auto is_name_start(Char c) -> bool { } template -FMT_CONSTEXPR auto do_parse_arg_id(const Char* begin, const Char* end, - Handler&& handler) -> const Char* { +FMT_CONSTEXPR auto parse_arg_id(const Char* begin, const Char* end, + Handler&& handler) -> const Char* { Char c = *begin; if (c >= '0' && c <= '9') { int index = 0; @@ -2308,70 +2374,88 @@ FMT_CONSTEXPR auto do_parse_arg_id(const Char* begin, const Char* end, return it; } -template -FMT_CONSTEXPR auto parse_arg_id(const Char* begin, const Char* end, - Handler&& handler) -> const Char* { - FMT_ASSERT(begin != end, ""); - Char c = *begin; - if (c != '}' && c != ':') return do_parse_arg_id(begin, end, handler); - handler.on_auto(); - return begin; -} - -template struct dynamic_spec_id_handler { +template struct dynamic_spec_handler { basic_format_parse_context& 
ctx; arg_ref& ref; + arg_id_kind& kind; - FMT_CONSTEXPR void on_auto() { - int id = ctx.next_arg_id(); - ref = arg_ref(id); - ctx.check_dynamic_spec(id); - } FMT_CONSTEXPR void on_index(int id) { - ref = arg_ref(id); + ref = id; + kind = arg_id_kind::index; ctx.check_arg_id(id); ctx.check_dynamic_spec(id); } FMT_CONSTEXPR void on_name(basic_string_view id) { - ref = arg_ref(id); + ref = id; + kind = arg_id_kind::name; ctx.check_arg_id(id); } }; -// Parses [integer | "{" [arg_id] "}"]. +template struct parse_dynamic_spec_result { + const Char* end; + arg_id_kind kind; +}; + +// Parses integer | "{" [arg_id] "}". template FMT_CONSTEXPR auto parse_dynamic_spec(const Char* begin, const Char* end, int& value, arg_ref& ref, basic_format_parse_context& ctx) - -> const Char* { + -> parse_dynamic_spec_result { FMT_ASSERT(begin != end, ""); + auto kind = arg_id_kind::none; if ('0' <= *begin && *begin <= '9') { int val = parse_nonnegative_int(begin, end, -1); - if (val != -1) - value = val; - else - report_error("number is too big"); - } else if (*begin == '{') { - ++begin; - auto handler = dynamic_spec_id_handler{ctx, ref}; - if (begin != end) begin = parse_arg_id(begin, end, handler); - if (begin != end && *begin == '}') return ++begin; + if (val == -1) report_error("number is too big"); + value = val; + } else { + if (*begin == '{') { + ++begin; + if (begin != end) { + Char c = *begin; + if (c == '}' || c == ':') { + int id = ctx.next_arg_id(); + ref = id; + kind = arg_id_kind::index; + ctx.check_dynamic_spec(id); + } else { + begin = parse_arg_id(begin, end, + dynamic_spec_handler{ctx, ref, kind}); + } + } + if (begin != end && *begin == '}') return {++begin, kind}; + } report_error("invalid format string"); } - return begin; + return {begin, kind}; +} + +template +FMT_CONSTEXPR auto parse_width(const Char* begin, const Char* end, + format_specs& specs, arg_ref& width_ref, + basic_format_parse_context& ctx) + -> const Char* { + auto result = parse_dynamic_spec(begin, end, 
specs.width, width_ref, ctx); + specs.set_dynamic_width(result.kind); + return result.end; } template FMT_CONSTEXPR auto parse_precision(const Char* begin, const Char* end, - int& value, arg_ref& ref, + format_specs& specs, + arg_ref& precision_ref, basic_format_parse_context& ctx) -> const Char* { ++begin; - if (begin == end || *begin == '}') { + if (begin == end) { report_error("invalid precision"); return begin; } - return parse_dynamic_spec(begin, end, value, ref, ctx); + auto result = + parse_dynamic_spec(begin, end, specs.precision, precision_ref, ctx); + specs.set_dynamic_precision(result.kind); + return result.end; } enum class state { start, align, sign, hash, zero, width, precision, locale }; @@ -2404,15 +2488,12 @@ FMT_CONSTEXPR auto parse_format_specs(const Char* begin, const Char* end, constexpr auto integral_set = sint_set | uint_set | bool_set | char_set; struct { const Char*& begin; - dynamic_format_specs& specs; + format_specs& specs; type arg_type; FMT_CONSTEXPR auto operator()(pres pres_type, int set) -> const Char* { - if (!in(arg_type, set)) { - if (arg_type == type::none_type) return begin; - report_error("invalid format specifier"); - } - specs.type = pres_type; + if (!in(arg_type, set)) report_error("invalid format specifier"); + specs.set_type(pres_type); return begin + 1; } } parse_presentation_type{begin, specs, arg_type}; @@ -2423,43 +2504,31 @@ FMT_CONSTEXPR auto parse_format_specs(const Char* begin, const Char* end, case '>': case '^': enter_state(state::align); - specs.align = parse_align(c); + specs.set_align(parse_align(c)); ++begin; break; case '+': - case '-': + FMT_FALLTHROUGH; case ' ': - if (arg_type == type::none_type) return begin; + specs.set_sign(c == ' ' ? 
sign::space : sign::plus); + FMT_FALLTHROUGH; + case '-': enter_state(state::sign, in(arg_type, sint_set | float_set)); - switch (c) { - case '+': - specs.sign = sign::plus; - break; - case '-': - specs.sign = sign::minus; - break; - case ' ': - specs.sign = sign::space; - break; - } ++begin; break; case '#': - if (arg_type == type::none_type) return begin; enter_state(state::hash, is_arithmetic_type(arg_type)); - specs.alt = true; + specs.set_alt(); ++begin; break; case '0': enter_state(state::zero); - if (!is_arithmetic_type(arg_type)) { - if (arg_type == type::none_type) return begin; + if (!is_arithmetic_type(arg_type)) report_error("format specifier requires numeric argument"); - } - if (specs.align == align::none) { + if (specs.align() == align::none) { // Ignore 0 if align is specified for compatibility with std::format. - specs.align = align::numeric; - specs.fill = '0'; + specs.set_align(align::numeric); + specs.set_fill('0'); } ++begin; break; @@ -2474,52 +2543,49 @@ FMT_CONSTEXPR auto parse_format_specs(const Char* begin, const Char* end, case '9': case '{': enter_state(state::width); - begin = parse_dynamic_spec(begin, end, specs.width, specs.width_ref, ctx); + begin = parse_width(begin, end, specs, specs.width_ref, ctx); break; case '.': - if (arg_type == type::none_type) return begin; enter_state(state::precision, in(arg_type, float_set | string_set | cstring_set)); - begin = parse_precision(begin, end, specs.precision, specs.precision_ref, - ctx); + begin = parse_precision(begin, end, specs, specs.precision_ref, ctx); break; case 'L': - if (arg_type == type::none_type) return begin; enter_state(state::locale, is_arithmetic_type(arg_type)); - specs.localized = true; + specs.set_localized(); ++begin; break; case 'd': return parse_presentation_type(pres::dec, integral_set); case 'X': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'x': return parse_presentation_type(pres::hex, integral_set); case 'o': return 
parse_presentation_type(pres::oct, integral_set); case 'B': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'b': return parse_presentation_type(pres::bin, integral_set); case 'E': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'e': return parse_presentation_type(pres::exp, float_set); case 'F': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'f': return parse_presentation_type(pres::fixed, float_set); case 'G': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'g': return parse_presentation_type(pres::general, float_set); case 'A': - specs.upper = true; + specs.set_upper(); FMT_FALLTHROUGH; case 'a': return parse_presentation_type(pres::hexfloat, float_set); @@ -2548,11 +2614,11 @@ FMT_CONSTEXPR auto parse_format_specs(const Char* begin, const Char* end, report_error("invalid fill character '{'"); return begin; } - auto align = parse_align(to_ascii(*fill_end)); - enter_state(state::align, align != align::none); - specs.fill = - basic_string_view(begin, to_unsigned(fill_end - begin)); - specs.align = align; + auto alignment = parse_align(to_ascii(*fill_end)); + enter_state(state::align, alignment != align::none); + specs.set_fill( + basic_string_view(begin, to_unsigned(fill_end - begin))); + specs.set_align(alignment); begin = fill_end + 1; } } @@ -2562,39 +2628,53 @@ FMT_CONSTEXPR auto parse_format_specs(const Char* begin, const Char* end, } template -FMT_CONSTEXPR auto parse_replacement_field(const Char* begin, const Char* end, - Handler&& handler) -> const Char* { - struct id_adapter { - Handler& handler; - int arg_id; - - FMT_CONSTEXPR void on_auto() { arg_id = handler.on_arg_id(); } - FMT_CONSTEXPR void on_index(int id) { arg_id = handler.on_arg_id(id); } - FMT_CONSTEXPR void on_name(basic_string_view id) { - arg_id = handler.on_arg_id(id); - } - }; - +FMT_CONSTEXPR FMT_INLINE auto parse_replacement_field(const Char* begin, + const Char* end, + Handler&& handler) + -> const Char* { ++begin; - if 
(begin == end) return handler.on_error("invalid format string"), end; - if (*begin == '}') { + if (begin == end) { + handler.on_error("invalid format string"); + return end; + } + int arg_id = 0; + switch (*begin) { + case '}': handler.on_replacement_field(handler.on_arg_id(), begin); - } else if (*begin == '{') { + return begin + 1; + case '{': handler.on_text(begin, begin + 1); - } else { - auto adapter = id_adapter{handler, 0}; + return begin + 1; + case ':': + arg_id = handler.on_arg_id(); + break; + default: { + struct id_adapter { + Handler& handler; + int arg_id; + + FMT_CONSTEXPR void on_index(int id) { arg_id = handler.on_arg_id(id); } + FMT_CONSTEXPR void on_name(basic_string_view id) { + arg_id = handler.on_arg_id(id); + } + } adapter = {handler, 0}; begin = parse_arg_id(begin, end, adapter); + arg_id = adapter.arg_id; Char c = begin != end ? *begin : Char(); if (c == '}') { - handler.on_replacement_field(adapter.arg_id, begin); - } else if (c == ':') { - begin = handler.on_format_specs(adapter.arg_id, begin + 1, end); - if (begin == end || *begin != '}') - return handler.on_error("unknown format specifier"), end; - } else { - return handler.on_error("missing '}' in format string"), end; + handler.on_replacement_field(arg_id, begin); + return begin + 1; + } + if (c != ':') { + handler.on_error("missing '}' in format string"); + return end; } + break; } + } + begin = handler.on_format_specs(arg_id, begin + 1, end); + if (begin == end || *begin != '}') + return handler.on_error("unknown format specifier"), end; return begin + 1; } @@ -2680,13 +2760,15 @@ FMT_CONSTEXPR auto parse_format_specs(ParseContext& ctx) // Checks char specs and returns true iff the presentation type is char-like. 
FMT_CONSTEXPR inline auto check_char_specs(const format_specs& specs) -> bool { - if (specs.type != presentation_type::none && - specs.type != presentation_type::chr && - specs.type != presentation_type::debug) { + auto type = specs.type(); + if (type != presentation_type::none && type != presentation_type::chr && + type != presentation_type::debug) { return false; } - if (specs.align == align::numeric || specs.sign != sign::none || specs.alt) + if (specs.align() == align::numeric || specs.sign() != sign::none || + specs.alt()) { report_error("invalid format specifier for char"); + } return true; } @@ -2835,7 +2917,7 @@ template struct native_formatter { FMT_ENABLE_IF(U == type::string_type || U == type::cstring_type || U == type::char_type)> FMT_CONSTEXPR void set_debug_format(bool set = true) { - specs_.type = set ? presentation_type::debug : presentation_type::none; + specs_.set_type(set ? presentation_type::debug : presentation_type::none); } template diff --git a/third_party/fmt/include/fmt/chrono.h b/third_party/fmt/include/fmt/chrono.h index c93123fd..404cf40c 100644 --- a/third_party/fmt/include/fmt/chrono.h +++ b/third_party/fmt/include/fmt/chrono.h @@ -649,7 +649,7 @@ FMT_CONSTEXPR inline auto get_units() -> const char* { if (std::is_same::value) return "fs"; if (std::is_same::value) return "ps"; if (std::is_same::value) return "ns"; - if (std::is_same::value) return "µs"; + if (std::is_same::value) return use_utf8() ? 
"µs" : "us"; if (std::is_same::value) return "ms"; if (std::is_same::value) return "cs"; if (std::is_same::value) return "ds"; @@ -1147,7 +1147,7 @@ void write_fractional_seconds(OutputIt& out, Duration d, int precision = -1) { std::chrono::seconds::period>::value) { *out++ = '.'; out = detail::fill_n(out, leading_zeroes, '0'); - out = format_decimal(out, n, num_digits).end; + out = format_decimal(out, n, num_digits); } } else if (precision > 0) { *out++ = '.'; @@ -1158,12 +1158,12 @@ void write_fractional_seconds(OutputIt& out, Duration d, int precision = -1) { int num_truncated_digits = num_digits - remaining; n /= to_unsigned(detail::pow10(to_unsigned(num_truncated_digits))); if (n) { - out = format_decimal(out, n, remaining).end; + out = format_decimal(out, n, remaining); } return; } if (n) { - out = format_decimal(out, n, num_digits).end; + out = format_decimal(out, n, num_digits); remaining -= num_digits; } out = detail::fill_n(out, remaining, '0'); @@ -1319,7 +1319,7 @@ class tm_writer { const int num_digits = count_digits(n); if (width > num_digits) out_ = detail::fill_n(out_, width - num_digits, '0'); - out_ = format_decimal(out_, n, num_digits).end; + out_ = format_decimal(out_, n, num_digits); } void write_year(long long year) { if (year >= 0 && year < 10000) { @@ -1481,7 +1481,7 @@ class tm_writer { char buf[10]; size_t offset = 0; if (year >= 0 && year < 10000) { - copy2(buf, digits2(static_cast(year / 100))); + write2digits(buf, static_cast(year / 100)); } else { offset = 4; write_year_extended(year); @@ -1732,8 +1732,8 @@ template OutputIt { auto specs = format_specs(); specs.precision = precision; - specs.type = - precision >= 0 ? presentation_type::fixed : presentation_type::general; + specs.set_type(precision >= 0 ? 
presentation_type::fixed + : presentation_type::general); return write(out, val, specs); } @@ -1881,7 +1881,7 @@ struct chrono_formatter { if (width > num_digits) { out = detail::write_padding(out, pad, width - num_digits); } - out = format_decimal(out, n, num_digits).end; + out = format_decimal(out, n, num_digits); } void write_nan() { std::copy_n("nan", 3, out); } @@ -2251,14 +2251,16 @@ struct formatter, Char> { it = detail::parse_align(it, end, specs_); if (it == end) return it; - it = detail::parse_dynamic_spec(it, end, specs_.width, width_ref_, ctx); - if (it == end) return it; + Char c = *it; + if ((c >= '0' && c <= '9') || c == '{') { + it = detail::parse_width(it, end, specs_, width_ref_, ctx); + if (it == end) return it; + } auto checker = detail::chrono_format_checker(); if (*it == '.') { checker.has_precision_integral = !std::is_floating_point::value; - it = detail::parse_precision(it, end, specs_.precision, precision_ref_, - ctx); + it = detail::parse_precision(it, end, specs_, precision_ref_, ctx); } if (it != end && *it == 'L') { localized_ = true; @@ -2279,11 +2281,11 @@ struct formatter, Char> { // As a possible future optimization, we could avoid extra copying if width // is not specified. 
auto buf = basic_memory_buffer(); - auto out = std::back_inserter(buf); - detail::handle_dynamic_spec(specs.width, width_ref_, - ctx); - detail::handle_dynamic_spec(precision, - precision_ref_, ctx); + auto out = basic_appender(buf); + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, width_ref_, + ctx); + detail::handle_dynamic_spec(specs.dynamic_precision(), precision, + precision_ref_, ctx); if (begin == end || *begin == '}') { out = detail::format_duration_value(out, d.count(), precision); detail::format_duration_unit(out); @@ -2388,9 +2390,9 @@ template struct formatter { const Duration* subsecs) const -> decltype(ctx.out()) { auto specs = specs_; auto buf = basic_memory_buffer(); - auto out = std::back_inserter(buf); - detail::handle_dynamic_spec(specs.width, width_ref_, - ctx); + auto out = basic_appender(buf); + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, width_ref_, + ctx); auto loc_ref = ctx.locale(); detail::get_locale loc(static_cast(loc_ref), loc_ref); @@ -2410,8 +2412,11 @@ template struct formatter { it = detail::parse_align(it, end, specs_); if (it == end) return it; - it = detail::parse_dynamic_spec(it, end, specs_.width, width_ref_, ctx); - if (it == end) return it; + Char c = *it; + if ((c >= '0' && c <= '9') || c == '{') { + it = detail::parse_width(it, end, specs_, width_ref_, ctx); + if (it == end) return it; + } end = detail::parse_chrono_format(it, end, detail::tm_format_checker()); // Replace the default format_str only if the new spec is not empty. 
diff --git a/third_party/fmt/include/fmt/color.h b/third_party/fmt/include/fmt/color.h index f0e9dd94..231d93c8 100644 --- a/third_party/fmt/include/fmt/color.h +++ b/third_party/fmt/include/fmt/color.h @@ -560,31 +560,30 @@ struct formatter, Char> : formatter { auto format(const detail::styled_arg& arg, FormatContext& ctx) const -> decltype(ctx.out()) { const auto& ts = arg.style; - const auto& value = arg.value; auto out = ctx.out(); bool has_style = false; if (ts.has_emphasis()) { has_style = true; auto emphasis = detail::make_emphasis(ts.get_emphasis()); - out = std::copy(emphasis.begin(), emphasis.end(), out); + out = detail::copy(emphasis.begin(), emphasis.end(), out); } if (ts.has_foreground()) { has_style = true; auto foreground = detail::make_foreground_color(ts.get_foreground()); - out = std::copy(foreground.begin(), foreground.end(), out); + out = detail::copy(foreground.begin(), foreground.end(), out); } if (ts.has_background()) { has_style = true; auto background = detail::make_background_color(ts.get_background()); - out = std::copy(background.begin(), background.end(), out); + out = detail::copy(background.begin(), background.end(), out); } - out = formatter::format(value, ctx); + out = formatter::format(arg.value, ctx); if (has_style) { auto reset_color = string_view("\x1b[0m"); - out = std::copy(reset_color.begin(), reset_color.end(), out); + out = detail::copy(reset_color.begin(), reset_color.end(), out); } return out; } diff --git a/third_party/fmt/include/fmt/compile.h b/third_party/fmt/include/fmt/compile.h index b2afc2c3..36dd367d 100644 --- a/third_party/fmt/include/fmt/compile.h +++ b/third_party/fmt/include/fmt/compile.h @@ -21,12 +21,6 @@ FMT_EXPORT class compiled_string {}; namespace detail { -template -FMT_CONSTEXPR inline auto copy(InputIt begin, InputIt end, counting_iterator it) - -> counting_iterator { - return it + (end - begin); -} - template struct is_compiled_string : std::is_base_of {}; @@ -275,6 +269,7 @@ constexpr 
parse_specs_result parse_specs(basic_string_view str, } template struct arg_id_handler { + arg_id_kind kind; arg_ref arg_id; constexpr int on_auto() { @@ -282,25 +277,28 @@ template struct arg_id_handler { return 0; } constexpr int on_index(int id) { + kind = arg_id_kind::index; arg_id = arg_ref(id); return 0; } constexpr int on_name(basic_string_view id) { + kind = arg_id_kind::name; arg_id = arg_ref(id); return 0; } }; template struct parse_arg_id_result { + arg_id_kind kind; arg_ref arg_id; const Char* arg_id_end; }; template constexpr auto parse_arg_id(const Char* begin, const Char* end) { - auto handler = arg_id_handler{arg_ref{}}; + auto handler = arg_id_handler{arg_id_kind::none, arg_ref{}}; auto arg_id_end = parse_arg_id(begin, end, handler); - return parse_arg_id_result{handler.arg_id, arg_id_end}; + return parse_arg_id_result{handler.kind, handler.arg_id, arg_id_end}; } template struct field_type { @@ -363,18 +361,18 @@ constexpr auto compile_format_string(S fmt) { constexpr char_type c = arg_id_end_pos != str.size() ? str[arg_id_end_pos] : char_type(); static_assert(c == '}' || c == ':', "missing '}' in format string"); - if constexpr (arg_id_result.arg_id.kind == arg_id_kind::index) { + if constexpr (arg_id_result.kind == arg_id_kind::index) { static_assert( ID == manual_indexing_id || ID == 0, "cannot switch from automatic to manual argument indexing"); - constexpr auto arg_index = arg_id_result.arg_id.val.index; + constexpr auto arg_index = arg_id_result.arg_id.index; return parse_replacement_field_then_tail, Args, arg_id_end_pos, arg_index, manual_indexing_id>( fmt); - } else if constexpr (arg_id_result.arg_id.kind == arg_id_kind::name) { + } else if constexpr (arg_id_result.kind == arg_id_kind::name) { constexpr auto arg_index = - get_arg_index_by_name(arg_id_result.arg_id.val.name, Args{}); + get_arg_index_by_name(arg_id_result.arg_id.name, Args{}); if constexpr (arg_index >= 0) { constexpr auto next_id = ID != manual_indexing_id ? 
ID + 1 : manual_indexing_id; @@ -383,8 +381,7 @@ constexpr auto compile_format_string(S fmt) { arg_index, next_id>(fmt); } else if constexpr (c == '}') { return parse_tail( - runtime_named_field{arg_id_result.arg_id.val.name}, - fmt); + runtime_named_field{arg_id_result.arg_id.name}, fmt); } else if constexpr (c == ':') { return unknown_format(); // no type info for specs parsing } @@ -496,7 +493,9 @@ template ::value)> FMT_CONSTEXPR20 auto formatted_size(const S& fmt, const Args&... args) -> size_t { - return fmt::format_to(detail::counting_iterator(), fmt, args...).count(); + auto buf = detail::counting_buffer<>(); + fmt::format_to(appender(buf), fmt, args...); + return buf.count(); } template class fallback_file : public file_base { }; #ifndef FMT_USE_FALLBACK_FILE -# define FMT_USE_FALLBACK_FILE 1 +# define FMT_USE_FALLBACK_FILE 0 #endif template struct iterator_traits { + using iterator_category = output_iterator_tag; + using value_type = char; + using difference_type = ptrdiff_t; +}; +} // namespace std + #ifndef FMT_THROW # if FMT_EXCEPTIONS # if FMT_MSC_VERSION || defined(__NVCC__) @@ -515,7 +523,7 @@ template = 307 && !FMT_ICC_VERSION __attribute__((no_sanitize("undefined"))) #endif -inline auto +FMT_CONSTEXPR20 inline auto reserve(OutputIt it, size_t n) -> typename OutputIt::value_type* { auto& c = get_container(it); size_t size = c.size(); @@ -524,7 +532,8 @@ reserve(OutputIt it, size_t n) -> typename OutputIt::value_type* { } template -inline auto reserve(basic_appender it, size_t n) -> basic_appender { +FMT_CONSTEXPR20 inline auto reserve(basic_appender it, size_t n) + -> basic_appender { buffer& buf = get_container(it); buf.try_reserve(buf.size() + n); return it; @@ -543,7 +552,8 @@ template constexpr auto to_pointer(OutputIt, size_t) -> T* { return nullptr; } -template auto to_pointer(basic_appender it, size_t n) -> T* { +template +FMT_CONSTEXPR20 auto to_pointer(basic_appender it, size_t n) -> T* { buffer& buf = get_container(it); auto size = 
buf.size(); buf.try_reserve(size + n); @@ -921,7 +931,7 @@ class basic_memory_buffer : public detail::buffer { using detail::buffer::append; template - void append(const ContiguousRange& range) { + FMT_CONSTEXPR20 void append(const ContiguousRange& range) { append(range.data(), range.data() + range.size()); } }; @@ -1106,21 +1116,26 @@ using uint64_or_128_t = conditional_t() <= 64, uint64_t, uint128_t>; (factor) * 100000000, (factor) * 1000000000 // Converts value in the range [0, 100) to a string. -constexpr auto digits2(size_t value) -> const char* { - // GCC generates slightly better code when value is pointer-size. - return &"0001020304050607080910111213141516171819" - "2021222324252627282930313233343536373839" - "4041424344454647484950515253545556575859" - "6061626364656667686970717273747576777879" - "8081828384858687888990919293949596979899"[value * 2]; +// GCC generates slightly better code when value is pointer-size. +inline auto digits2(size_t value) -> const char* { + // Align data since unaligned access may be slower when crossing a + // hardware-specific boundary. + alignas(2) static const char data[] = + "0001020304050607080910111213141516171819" + "2021222324252627282930313233343536373839" + "4041424344454647484950515253545556575859" + "6061626364656667686970717273747576777879" + "8081828384858687888990919293949596979899"; + return &data[value * 2]; } // Sign is a template parameter to workaround a bug in gcc 4.8. 
-template constexpr auto sign(Sign s) -> Char { +template constexpr auto getsign(Sign s) -> Char { #if !FMT_GCC_VERSION || FMT_GCC_VERSION >= 604 - static_assert(std::is_same::value, ""); + static_assert(std::is_same::value, ""); #endif - return static_cast("\0-+ "[s]); + return static_cast(((' ' << 24) | ('+' << 16) | ('-' << 8)) >> + (static_cast(s) * 8)); } template FMT_CONSTEXPR auto count_digits_fallback(T n) -> int { @@ -1265,56 +1280,60 @@ inline auto equal2(const char* lhs, const char* rhs) -> bool { return memcmp(lhs, rhs, 2) == 0; } -// Copies two characters from src to dst. +// Writes a two-digit value to out. template -FMT_CONSTEXPR20 FMT_INLINE void copy2(Char* dst, const char* src) { - if (!is_constant_evaluated() && sizeof(Char) == sizeof(char)) { - memcpy(dst, src, 2); +FMT_CONSTEXPR20 FMT_INLINE void write2digits(Char* out, size_t value) { + if (!is_constant_evaluated() && std::is_same::value) { + memcpy(out, digits2(value), 2); return; } - *dst++ = static_cast(*src++); - *dst = static_cast(*src); + *out++ = static_cast('0' + value / 10); + *out = static_cast('0' + value % 10); } -template struct format_decimal_result { - Iterator begin; - Iterator end; -}; - -// Formats a decimal unsigned integer value writing into out pointing to a -// buffer of specified size. The caller must ensure that the buffer is large -// enough. +// Formats a decimal unsigned integer value writing to out pointing to a buffer +// of specified size. The caller must ensure that the buffer is large enough. template -FMT_CONSTEXPR20 auto format_decimal(Char* out, UInt value, int size) - -> format_decimal_result { +FMT_CONSTEXPR20 auto do_format_decimal(Char* out, UInt value, int size) + -> Char* { FMT_ASSERT(size >= count_digits(value), "invalid digit count"); - out += size; - Char* end = out; + unsigned n = to_unsigned(size); while (value >= 100) { // Integer division is slow so do it for a group of two digits instead // of for every digit. 
The idea comes from the talk by Alexandrescu // "Three Optimization Tips for C++". See speed-test for a comparison. - out -= 2; - copy2(out, digits2(static_cast(value % 100))); + n -= 2; + write2digits(out + n, static_cast(value % 100)); value /= 100; } - if (value < 10) { - *--out = static_cast('0' + value); - return {out, end}; + if (value >= 10) { + n -= 2; + write2digits(out + n, static_cast(value)); + } else { + out[--n] = static_cast('0' + value); } - out -= 2; - copy2(out, digits2(static_cast(value))); - return {out, end}; + return out + n; } -template >::value)> -FMT_CONSTEXPR inline auto format_decimal(Iterator out, UInt value, int size) - -> format_decimal_result { +template +FMT_CONSTEXPR FMT_INLINE auto format_decimal(Char* out, UInt value, + int num_digits) -> Char* { + do_format_decimal(out, value, num_digits); + return out + num_digits; +} + +template ::value)> +FMT_CONSTEXPR auto format_decimal(OutputIt out, UInt value, int num_digits) + -> OutputIt { + if (auto ptr = to_pointer(out, to_unsigned(num_digits))) { + do_format_decimal(ptr, value, num_digits); + return out; + } // Buffer is large enough to hold all digits (digits10 + 1). 
- Char buffer[digits10() + 1] = {}; - auto end = format_decimal(buffer, value, size).end; - return {out, detail::copy_noinline(buffer, end, out)}; + char buffer[digits10() + 1] = {}; + do_format_decimal(buffer, value, num_digits); + return detail::copy_noinline(buffer, buffer + num_digits, out); } template @@ -1331,9 +1350,10 @@ FMT_CONSTEXPR auto format_uint(Char* buffer, UInt value, int num_digits, return end; } -template -FMT_CONSTEXPR inline auto format_uint(It out, UInt value, int num_digits, - bool upper = false) -> It { +template ::value)> +FMT_CONSTEXPR inline auto format_uint(OutputIt out, UInt value, int num_digits, + bool upper = false) -> OutputIt { if (auto ptr = to_pointer(out, to_unsigned(num_digits))) { format_uint(ptr, value, num_digits, upper); return out; @@ -1401,10 +1421,12 @@ template class to_utf8 { if (policy == to_utf8_error_policy::abort) return false; buf.append(string_view("\xEF\xBF\xBD")); --p; + continue; } else { c = (c << 10) + static_cast(*p) - 0x35fdc00; } - } else if (c < 0x80) { + } + if (c < 0x80) { buf.push_back(static_cast(c)); } else if (c < 0x800) { buf.push_back(static_cast(0xc0 | (c >> 6))); @@ -1572,25 +1594,30 @@ template constexpr auto exponent_bias() -> int { } // Writes the exponent exp in the form "[+-]d{2,3}" to buffer. 
-template -FMT_CONSTEXPR auto write_exponent(int exp, It it) -> It { +template +FMT_CONSTEXPR auto write_exponent(int exp, OutputIt out) -> OutputIt { FMT_ASSERT(-10000 < exp && exp < 10000, "exponent out of range"); if (exp < 0) { - *it++ = static_cast('-'); + *out++ = static_cast('-'); exp = -exp; } else { - *it++ = static_cast('+'); - } - if (exp >= 100) { - const char* top = digits2(to_unsigned(exp / 100)); - if (exp >= 1000) *it++ = static_cast(top[0]); - *it++ = static_cast(top[1]); - exp %= 100; + *out++ = static_cast('+'); } - const char* d = digits2(to_unsigned(exp)); - *it++ = static_cast(d[0]); - *it++ = static_cast(d[1]); - return it; + auto uexp = static_cast(exp); + if (is_constant_evaluated()) { + if (uexp < 10) *out++ = '0'; + return format_decimal(out, uexp, count_digits(uexp)); + } + if (uexp >= 100u) { + const char* top = digits2(uexp / 100); + if (uexp >= 1000u) *out++ = static_cast(top[0]); + *out++ = static_cast(top[1]); + uexp %= 100; + } + const char* d = digits2(uexp); + *out++ = static_cast(d[0]); + *out++ = static_cast(d[1]); + return out; } // A floating-point number f * pow(2, e) where F is an unsigned type. 
@@ -1692,11 +1719,11 @@ constexpr auto convert_float(T value) -> convert_float_result { } template -FMT_NOINLINE FMT_CONSTEXPR auto fill(OutputIt it, size_t n, const fill_t& fill) - -> OutputIt { - auto fill_size = fill.size(); - if (fill_size == 1) return detail::fill_n(it, n, fill.template get()); - if (const Char* data = fill.template data()) { +FMT_NOINLINE FMT_CONSTEXPR auto fill(OutputIt it, size_t n, + const basic_specs& specs) -> OutputIt { + auto fill_size = specs.fill_size(); + if (fill_size == 1) return detail::fill_n(it, n, specs.fill_unit()); + if (const Char* data = specs.fill()) { for (size_t i = 0; i < n; ++i) it = copy(data, data + fill_size, it); } return it; @@ -1705,36 +1732,38 @@ FMT_NOINLINE FMT_CONSTEXPR auto fill(OutputIt it, size_t n, const fill_t& fill) // Writes the output of f, padded according to format specifications in specs. // size: output size in code units. // width: output display width in (terminal) column positions. -template FMT_CONSTEXPR auto write_padded(OutputIt out, const format_specs& specs, size_t size, size_t width, F&& f) -> OutputIt { - static_assert(align == align::left || align == align::right, ""); + static_assert(default_align == align::left || default_align == align::right, + ""); unsigned spec_width = to_unsigned(specs.width); size_t padding = spec_width > width ? spec_width - width : 0; // Shifts are encoded as string literals because static constexpr is not // supported in constexpr functions. - auto* shifts = align == align::left ? "\x1f\x1f\x00\x01" : "\x00\x1f\x00\x01"; - size_t left_padding = padding >> shifts[specs.align]; + auto* shifts = + default_align == align::left ? 
"\x1f\x1f\x00\x01" : "\x00\x1f\x00\x01"; + size_t left_padding = padding >> shifts[static_cast(specs.align())]; size_t right_padding = padding - left_padding; - auto it = reserve(out, size + padding * specs.fill.size()); - if (left_padding != 0) it = fill(it, left_padding, specs.fill); + auto it = reserve(out, size + padding * specs.fill_size()); + if (left_padding != 0) it = fill(it, left_padding, specs); it = f(it); - if (right_padding != 0) it = fill(it, right_padding, specs.fill); + if (right_padding != 0) it = fill(it, right_padding, specs); return base_iterator(out, it); } -template constexpr auto write_padded(OutputIt out, const format_specs& specs, size_t size, F&& f) -> OutputIt { - return write_padded(out, specs, size, size, f); + return write_padded(out, specs, size, size, f); } -template +template FMT_CONSTEXPR auto write_bytes(OutputIt out, string_view bytes, const format_specs& specs = {}) -> OutputIt { - return write_padded( + return write_padded( out, specs, bytes.size(), [bytes](reserve_iterator it) { const char* data = bytes.data(); return copy(data, data + bytes.size(), it); @@ -1905,7 +1934,7 @@ auto write_escaped_char(OutputIt out, Char v) -> OutputIt { template FMT_CONSTEXPR auto write_char(OutputIt out, Char value, const format_specs& specs) -> OutputIt { - bool is_debug = specs.type == presentation_type::debug; + bool is_debug = specs.type() == presentation_type::debug; return write_padded(out, specs, 1, [=](reserve_iterator it) { if (is_debug) return write_escaped_char(it, value); *it++ = value; @@ -1932,7 +1961,7 @@ template struct write_int_data { FMT_CONSTEXPR write_int_data(int num_digits, unsigned prefix, const format_specs& specs) : size((prefix >> 24) + to_unsigned(num_digits)), padding(0) { - if (specs.align == align::numeric) { + if (specs.align() == align::numeric) { auto width = to_unsigned(specs.width); if (width > size) { padding = width - size; @@ -2050,7 +2079,7 @@ auto write_int(OutputIt out, UInt value, unsigned prefix, 
static_assert(std::is_same, UInt>::value, ""); int num_digits = 0; auto buffer = memory_buffer(); - switch (specs.type) { + switch (specs.type()) { default: FMT_ASSERT(false, ""); FMT_FALLTHROUGH; @@ -2060,22 +2089,22 @@ auto write_int(OutputIt out, UInt value, unsigned prefix, format_decimal(appender(buffer), value, num_digits); break; case presentation_type::hex: - if (specs.alt) - prefix_append(prefix, unsigned(specs.upper ? 'X' : 'x') << 8 | '0'); + if (specs.alt()) + prefix_append(prefix, unsigned(specs.upper() ? 'X' : 'x') << 8 | '0'); num_digits = count_digits<4>(value); - format_uint<4, char>(appender(buffer), value, num_digits, specs.upper); + format_uint<4, char>(appender(buffer), value, num_digits, specs.upper()); break; case presentation_type::oct: num_digits = count_digits<3>(value); // Octal prefix '0' is counted as a digit, so only add it if precision // is not greater than the number of digits. - if (specs.alt && specs.precision <= num_digits && value != 0) + if (specs.alt() && specs.precision <= num_digits && value != 0) prefix_append(prefix, '0'); format_uint<3, char>(appender(buffer), value, num_digits); break; case presentation_type::bin: - if (specs.alt) - prefix_append(prefix, unsigned(specs.upper ? 'B' : 'b') << 8 | '0'); + if (specs.alt()) + prefix_append(prefix, unsigned(specs.upper() ? 
'B' : 'b') << 8 | '0'); num_digits = count_digits<1>(value); format_uint<1, char>(appender(buffer), value, num_digits); break; @@ -2108,7 +2137,7 @@ template struct write_int_arg { }; template -FMT_CONSTEXPR auto make_write_int_arg(T value, sign_t sign) +FMT_CONSTEXPR auto make_write_int_arg(T value, sign s) -> write_int_arg> { auto prefix = 0u; auto abs_value = static_cast>(value); @@ -2118,7 +2147,7 @@ FMT_CONSTEXPR auto make_write_int_arg(T value, sign_t sign) } else { constexpr const unsigned prefixes[4] = {0, 0, 0x1000000u | '+', 0x1000000u | ' '}; - prefix = prefixes[sign]; + prefix = prefixes[static_cast(s)]; } return {abs_value, prefix}; } @@ -2132,7 +2161,7 @@ template struct loc_writer { template ::value)> auto operator()(T value) -> bool { - auto arg = make_write_int_arg(value, specs.sign); + auto arg = make_write_int_arg(value, specs.sign()); write_int(out, static_cast>(arg.abs_value), arg.prefix, specs, digit_grouping(grouping, sep)); return true; @@ -2151,7 +2180,7 @@ FMT_CONSTEXPR FMT_INLINE auto write_int(OutputIt out, write_int_arg arg, static_assert(std::is_same>::value, ""); auto abs_value = arg.abs_value; auto prefix = arg.prefix; - switch (specs.type) { + switch (specs.type()) { default: FMT_ASSERT(false, ""); FMT_FALLTHROUGH; @@ -2160,23 +2189,23 @@ FMT_CONSTEXPR FMT_INLINE auto write_int(OutputIt out, write_int_arg arg, int num_digits = count_digits(abs_value); return write_int( out, num_digits, prefix, specs, [=](reserve_iterator it) { - return format_decimal(it, abs_value, num_digits).end; + return format_decimal(it, abs_value, num_digits); }); } case presentation_type::hex: { - if (specs.alt) - prefix_append(prefix, unsigned(specs.upper ? 'X' : 'x') << 8 | '0'); + if (specs.alt()) + prefix_append(prefix, unsigned(specs.upper() ? 
'X' : 'x') << 8 | '0'); int num_digits = count_digits<4>(abs_value); return write_int( out, num_digits, prefix, specs, [=](reserve_iterator it) { - return format_uint<4, Char>(it, abs_value, num_digits, specs.upper); + return format_uint<4, Char>(it, abs_value, num_digits, specs.upper()); }); } case presentation_type::oct: { int num_digits = count_digits<3>(abs_value); // Octal prefix '0' is counted as a digit, so only add it if precision // is not greater than the number of digits. - if (specs.alt && specs.precision <= num_digits && abs_value != 0) + if (specs.alt() && specs.precision <= num_digits && abs_value != 0) prefix_append(prefix, '0'); return write_int( out, num_digits, prefix, specs, [=](reserve_iterator it) { @@ -2184,8 +2213,8 @@ FMT_CONSTEXPR FMT_INLINE auto write_int(OutputIt out, write_int_arg arg, }); } case presentation_type::bin: { - if (specs.alt) - prefix_append(prefix, unsigned(specs.upper ? 'B' : 'b') << 8 | '0'); + if (specs.alt()) + prefix_append(prefix, unsigned(specs.upper() ? 'B' : 'b') << 8 | '0'); int num_digits = count_digits<1>(abs_value); return write_int( out, num_digits, prefix, specs, [=](reserve_iterator it) { @@ -2210,8 +2239,8 @@ template out, T value, const format_specs& specs, locale_ref loc) -> basic_appender { - if (specs.localized && write_loc(out, value, specs, loc)) return out; - return write_int_noinline(out, make_write_int_arg(value, specs.sign), + if (specs.localized() && write_loc(out, value, specs, loc)) return out; + return write_int_noinline(out, make_write_int_arg(value, specs.sign()), specs, loc); } // An inlined version of write used in format string compilation. 
@@ -2223,51 +2252,11 @@ template OutputIt { - if (specs.localized && write_loc(out, value, specs, loc)) return out; - return write_int(out, make_write_int_arg(value, specs.sign), specs, + if (specs.localized() && write_loc(out, value, specs, loc)) return out; + return write_int(out, make_write_int_arg(value, specs.sign()), specs, loc); } -// An output iterator that counts the number of objects written to it and -// discards them. -class counting_iterator { - private: - size_t count_; - - public: - using iterator_category = std::output_iterator_tag; - using difference_type = std::ptrdiff_t; - using pointer = void; - using reference = void; - FMT_UNCHECKED_ITERATOR(counting_iterator); - - struct value_type { - template FMT_CONSTEXPR void operator=(const T&) {} - }; - - FMT_CONSTEXPR counting_iterator() : count_(0) {} - - FMT_CONSTEXPR auto count() const -> size_t { return count_; } - - FMT_CONSTEXPR auto operator++() -> counting_iterator& { - ++count_; - return *this; - } - FMT_CONSTEXPR auto operator++(int) -> counting_iterator { - auto it = *this; - ++*this; - return it; - } - - FMT_CONSTEXPR friend auto operator+(counting_iterator it, difference_type n) - -> counting_iterator { - it.count_ += static_cast(n); - return it; - } - - FMT_CONSTEXPR auto operator*() const -> value_type { return {}; } -}; - template FMT_CONSTEXPR auto write(OutputIt out, basic_string_view s, const format_specs& specs) -> OutputIt { @@ -2275,10 +2264,14 @@ FMT_CONSTEXPR auto write(OutputIt out, basic_string_view s, auto size = s.size(); if (specs.precision >= 0 && to_unsigned(specs.precision) < size) size = code_point_index(s, to_unsigned(specs.precision)); - bool is_debug = specs.type == presentation_type::debug; + bool is_debug = specs.type() == presentation_type::debug; size_t width = 0; - if (is_debug) size = write_escaped_string(counting_iterator{}, s).count(); + if (is_debug) { + auto buf = counting_buffer(); + write_escaped_string(basic_appender(buf), s); + size = buf.count(); + } 
if (specs.width != 0) { if (is_debug) @@ -2301,7 +2294,7 @@ FMT_CONSTEXPR auto write(OutputIt out, template FMT_CONSTEXPR auto write(OutputIt out, const Char* s, const format_specs& specs, locale_ref) -> OutputIt { - if (specs.type == presentation_type::pointer) + if (specs.type() == presentation_type::pointer) return write_ptr(out, bit_cast(s), &specs); if (!s) report_error("string pointer is null"); return write(out, basic_string_view(s), specs, {}); @@ -2324,30 +2317,29 @@ FMT_CONSTEXPR auto write(OutputIt out, T value) -> OutputIt { return out; } if (negative) *out++ = static_cast('-'); - return format_decimal(out, abs_value, num_digits).end; + return format_decimal(out, abs_value, num_digits); } -// DEPRECATED! template FMT_CONSTEXPR auto parse_align(const Char* begin, const Char* end, format_specs& specs) -> const Char* { FMT_ASSERT(begin != end, ""); - auto align = align::none; + auto alignment = align::none; auto p = begin + code_point_length(begin); if (end - p <= 0) p = begin; for (;;) { switch (to_ascii(*p)) { case '<': - align = align::left; + alignment = align::left; break; case '>': - align = align::right; + alignment = align::right; break; case '^': - align = align::center; + alignment = align::center; break; } - if (align != align::none) { + if (alignment != align::none) { if (p != begin) { auto c = *begin; if (c == '}') return begin; @@ -2355,7 +2347,7 @@ FMT_CONSTEXPR auto parse_align(const Char* begin, const Char* end, report_error("invalid fill character '{'"); return begin; } - specs.fill = basic_string_view(begin, to_unsigned(p - begin)); + specs.set_fill(basic_string_view(begin, to_unsigned(p - begin))); begin = p + 1; } else { ++begin; @@ -2366,68 +2358,25 @@ FMT_CONSTEXPR auto parse_align(const Char* begin, const Char* end, } p = begin; } - specs.align = align; + specs.set_align(alignment); return begin; } -// A floating-point presentation format. 
-enum class float_format : unsigned char { - general, // General: exponent notation or fixed point based on magnitude. - exp, // Exponent notation with the default precision of 6, e.g. 1.2e-3. - fixed // Fixed point with the default precision of 6, e.g. 0.0012. -}; - -struct float_specs { - int precision; - float_format format : 8; - sign_t sign : 8; - bool locale : 1; - bool binary32 : 1; - bool showpoint : 1; -}; - -// DEPRECATED! -FMT_CONSTEXPR inline auto parse_float_type_spec(const format_specs& specs) - -> float_specs { - auto result = float_specs(); - result.showpoint = specs.alt; - result.locale = specs.localized; - switch (specs.type) { - default: - FMT_FALLTHROUGH; - case presentation_type::none: - result.format = float_format::general; - break; - case presentation_type::exp: - result.format = float_format::exp; - result.showpoint |= specs.precision != 0; - break; - case presentation_type::fixed: - result.format = float_format::fixed; - result.showpoint |= specs.precision != 0; - break; - case presentation_type::general: - result.format = float_format::general; - break; - } - return result; -} - template FMT_CONSTEXPR20 auto write_nonfinite(OutputIt out, bool isnan, - format_specs specs, sign_t sign) - -> OutputIt { + format_specs specs, sign s) -> OutputIt { auto str = - isnan ? (specs.upper ? "NAN" : "nan") : (specs.upper ? "INF" : "inf"); + isnan ? (specs.upper() ? "NAN" : "nan") : (specs.upper() ? "INF" : "inf"); constexpr size_t str_size = 3; - auto size = str_size + (sign ? 1 : 0); + auto size = str_size + (s != sign::none ? 1 : 0); // Replace '0'-padding with space for non-finite values. 
const bool is_zero_fill = - specs.fill.size() == 1 && specs.fill.template get() == '0'; - if (is_zero_fill) specs.fill = ' '; + specs.fill_size() == 1 && specs.fill_unit() == '0'; + if (is_zero_fill) specs.set_fill(' '); return write_padded(out, specs, size, [=](reserve_iterator it) { - if (sign) *it++ = detail::sign(sign); + if (s != sign::none) + *it++ = detail::getsign(s); return copy(str, str + str_size, it); }); } @@ -2455,7 +2404,7 @@ constexpr auto write_significand(OutputIt out, const char* significand, template inline auto write_significand(OutputIt out, UInt significand, int significand_size) -> OutputIt { - return format_decimal(out, significand, significand_size).end; + return format_decimal(out, significand, significand_size); } template FMT_CONSTEXPR20 auto write_significand(OutputIt out, T significand, @@ -2475,14 +2424,13 @@ template ::value)> inline auto write_significand(Char* out, UInt significand, int significand_size, int integral_size, Char decimal_point) -> Char* { - if (!decimal_point) - return format_decimal(out, significand, significand_size).end; + if (!decimal_point) return format_decimal(out, significand, significand_size); out += significand_size + 1; Char* end = out; int floating_size = significand_size - integral_size; for (int i = floating_size / 2; i > 0; --i) { out -= 2; - copy2(out, digits2(static_cast(significand % 100))); + write2digits(out, static_cast(significand % 100)); significand /= 100; } if (floating_size % 2 != 0) { @@ -2539,33 +2487,31 @@ FMT_CONSTEXPR20 auto write_significand(OutputIt out, T significand, template > FMT_CONSTEXPR20 auto do_write_float(OutputIt out, const DecimalFP& f, - const format_specs& specs, - float_specs fspecs, locale_ref loc) - -> OutputIt { + const format_specs& specs, sign s, + locale_ref loc) -> OutputIt { auto significand = f.significand; int significand_size = get_significand_size(f); const Char zero = static_cast('0'); - auto sign = fspecs.sign; - size_t size = 
to_unsigned(significand_size) + (sign ? 1 : 0); + size_t size = to_unsigned(significand_size) + (s != sign::none ? 1 : 0); using iterator = reserve_iterator; - Char decimal_point = - fspecs.locale ? detail::decimal_point(loc) : static_cast('.'); + Char decimal_point = specs.localized() ? detail::decimal_point(loc) + : static_cast('.'); int output_exp = f.exponent + significand_size - 1; auto use_exp_format = [=]() { - if (fspecs.format == float_format::exp) return true; - if (fspecs.format != float_format::general) return false; + if (specs.type() == presentation_type::exp) return true; + if (specs.type() == presentation_type::fixed) return false; // Use the fixed notation if the exponent is in [exp_lower, exp_upper), // e.g. 0.0001 instead of 1e-04. Otherwise use the exponent notation. const int exp_lower = -4, exp_upper = 16; return output_exp < exp_lower || - output_exp >= (fspecs.precision > 0 ? fspecs.precision : exp_upper); + output_exp >= (specs.precision > 0 ? specs.precision : exp_upper); }; if (use_exp_format()) { int num_zeros = 0; - if (fspecs.showpoint) { - num_zeros = fspecs.precision - significand_size; + if (specs.alt()) { + num_zeros = specs.precision - significand_size; if (num_zeros < 0) num_zeros = 0; size += to_unsigned(num_zeros); } else if (significand_size == 1) { @@ -2576,9 +2522,9 @@ FMT_CONSTEXPR20 auto do_write_float(OutputIt out, const DecimalFP& f, if (abs_output_exp >= 100) exp_digits = abs_output_exp >= 1000 ? 4 : 3; size += to_unsigned((decimal_point ? 1 : 0) + 2 + exp_digits); - char exp_char = specs.upper ? 'E' : 'e'; + char exp_char = specs.upper() ? 'E' : 'e'; auto write = [=](iterator it) { - if (sign) *it++ = detail::sign(sign); + if (s != sign::none) *it++ = detail::getsign(s); // Insert a decimal point after the first digit and add an exponent. 
it = write_significand(it, significand, significand_size, 1, decimal_point); @@ -2595,31 +2541,32 @@ FMT_CONSTEXPR20 auto do_write_float(OutputIt out, const DecimalFP& f, if (f.exponent >= 0) { // 1234e5 -> 123400000[.0+] size += to_unsigned(f.exponent); - int num_zeros = fspecs.precision - exp; + int num_zeros = specs.precision - exp; abort_fuzzing_if(num_zeros > 5000); - if (fspecs.showpoint) { + if (specs.alt()) { ++size; - if (num_zeros <= 0 && fspecs.format != float_format::fixed) num_zeros = 0; + if (num_zeros <= 0 && specs.type() != presentation_type::fixed) + num_zeros = 0; if (num_zeros > 0) size += to_unsigned(num_zeros); } - auto grouping = Grouping(loc, fspecs.locale); + auto grouping = Grouping(loc, specs.localized()); size += to_unsigned(grouping.count_separators(exp)); return write_padded(out, specs, size, [&](iterator it) { - if (sign) *it++ = detail::sign(sign); + if (s != sign::none) *it++ = detail::getsign(s); it = write_significand(it, significand, significand_size, f.exponent, grouping); - if (!fspecs.showpoint) return it; + if (!specs.alt()) return it; *it++ = decimal_point; return num_zeros > 0 ? detail::fill_n(it, num_zeros, zero) : it; }); } else if (exp > 0) { // 1234e-2 -> 12.34[0+] - int num_zeros = fspecs.showpoint ? fspecs.precision - significand_size : 0; + int num_zeros = specs.alt() ? specs.precision - significand_size : 0; size += 1 + to_unsigned(num_zeros > 0 ? num_zeros : 0); - auto grouping = Grouping(loc, fspecs.locale); + auto grouping = Grouping(loc, specs.localized()); size += to_unsigned(grouping.count_separators(exp)); return write_padded(out, specs, size, [&](iterator it) { - if (sign) *it++ = detail::sign(sign); + if (s != sign::none) *it++ = detail::getsign(s); it = write_significand(it, significand, significand_size, exp, decimal_point, grouping); return num_zeros > 0 ? 
detail::fill_n(it, num_zeros, zero) : it; @@ -2627,14 +2574,14 @@ FMT_CONSTEXPR20 auto do_write_float(OutputIt out, const DecimalFP& f, } // 1234e-6 -> 0.001234 int num_zeros = -exp; - if (significand_size == 0 && fspecs.precision >= 0 && - fspecs.precision < num_zeros) { - num_zeros = fspecs.precision; + if (significand_size == 0 && specs.precision >= 0 && + specs.precision < num_zeros) { + num_zeros = specs.precision; } - bool pointy = num_zeros != 0 || significand_size != 0 || fspecs.showpoint; + bool pointy = num_zeros != 0 || significand_size != 0 || specs.alt(); size += 1 + (pointy ? 1 : 0) + to_unsigned(num_zeros); return write_padded(out, specs, size, [&](iterator it) { - if (sign) *it++ = detail::sign(sign); + if (s != sign::none) *it++ = detail::getsign(s); *it++ = zero; if (!pointy) return it; *it++ = decimal_point; @@ -2659,14 +2606,13 @@ template class fallback_digit_grouping { template FMT_CONSTEXPR20 auto write_float(OutputIt out, const DecimalFP& f, - const format_specs& specs, float_specs fspecs, + const format_specs& specs, sign s, locale_ref loc) -> OutputIt { if (is_constant_evaluated()) { return do_write_float>(out, f, specs, fspecs, - loc); + fallback_digit_grouping>(out, f, specs, s, loc); } else { - return do_write_float(out, f, specs, fspecs, loc); + return do_write_float(out, f, specs, s, loc); } } @@ -3130,18 +3076,17 @@ FMT_CONSTEXPR20 void format_hexfloat(Float value, format_specs specs, // Assume Float is in the format [sign][exponent][significand]. using carrier_uint = typename info::carrier_uint; - constexpr auto num_float_significand_bits = - detail::num_significand_bits(); + const auto num_float_significand_bits = detail::num_significand_bits(); basic_fp f(value); f.e += num_float_significand_bits; if (!has_implicit_bit()) --f.e; - constexpr auto num_fraction_bits = + const auto num_fraction_bits = num_float_significand_bits + (has_implicit_bit() ? 
1 : 0); - constexpr auto num_xdigits = (num_fraction_bits + 3) / 4; + const auto num_xdigits = (num_fraction_bits + 3) / 4; - constexpr auto leading_shift = ((num_xdigits - 1) * 4); + const auto leading_shift = ((num_xdigits - 1) * 4); const auto leading_mask = carrier_uint(0xF) << leading_shift; const auto leading_xdigit = static_cast((f.f & leading_mask) >> leading_shift); @@ -3173,20 +3118,20 @@ FMT_CONSTEXPR20 void format_hexfloat(Float value, format_specs specs, char xdigits[num_bits() / 4]; detail::fill_n(xdigits, sizeof(xdigits), '0'); - format_uint<4>(xdigits, f.f, num_xdigits, specs.upper); + format_uint<4>(xdigits, f.f, num_xdigits, specs.upper()); // Remove zero tail while (print_xdigits > 0 && xdigits[print_xdigits] == '0') --print_xdigits; buf.push_back('0'); - buf.push_back(specs.upper ? 'X' : 'x'); + buf.push_back(specs.upper() ? 'X' : 'x'); buf.push_back(xdigits[0]); - if (specs.alt || print_xdigits > 0 || print_xdigits < specs.precision) + if (specs.alt() || print_xdigits > 0 || print_xdigits < specs.precision) buf.push_back('.'); buf.append(xdigits + 1, xdigits + 1 + print_xdigits); for (; print_xdigits < specs.precision; ++print_xdigits) buf.push_back('0'); - buf.push_back(specs.upper ? 'P' : 'p'); + buf.push_back(specs.upper() ? 'P' : 'p'); uint32_t abs_e; if (f.e < 0) { @@ -3217,15 +3162,15 @@ constexpr auto fractional_part_rounding_thresholds(int index) -> uint32_t { } template -FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, +FMT_CONSTEXPR20 auto format_float(Float value, int precision, + const format_specs& specs, bool binary32, buffer& buf) -> int { // float is passed as double to reduce the number of instantiations. static_assert(!std::is_same::value, ""); - FMT_ASSERT(value >= 0, "value is negative"); auto converted_value = convert_float(value); - const bool fixed = specs.format == float_format::fixed; - if (value <= 0) { // <= instead of == to silence a warning. 
+ const bool fixed = specs.type() == presentation_type::fixed; + if (value == 0) { if (precision <= 0 || !fixed) { buf.push_back('0'); return 0; @@ -3250,16 +3195,6 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, exp = static_cast(e); if (e > exp) ++exp; // Compute ceil. dragon_flags = dragon::fixup; - } else if (precision < 0) { - // Use Dragonbox for the shortest format. - if (specs.binary32) { - auto dec = dragonbox::to_decimal(static_cast(value)); - write(appender(buf), dec.significand); - return dec.exponent; - } - auto dec = dragonbox::to_decimal(static_cast(value)); - write(appender(buf), dec.significand); - return dec.exponent; } else { // Extract significand bits and exponent bits. using info = dragonbox::float_info; @@ -3386,7 +3321,7 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, // for details. prod = ((subsegment * static_cast(450359963)) >> 20) + 1; digits = static_cast(prod >> 32); - copy2(buffer, digits2(digits)); + write2digits(buffer, digits); number_of_digits_printed += 2; } @@ -3394,7 +3329,7 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, while (number_of_digits_printed < number_of_digits_to_print) { prod = static_cast(prod) * static_cast(100); digits = static_cast(prod >> 32); - copy2(buffer + number_of_digits_printed, digits2(digits)); + write2digits(buffer + number_of_digits_printed, digits); number_of_digits_printed += 2; } }; @@ -3503,9 +3438,8 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, } if (use_dragon) { auto f = basic_fp(); - bool is_predecessor_closer = specs.binary32 - ? f.assign(static_cast(value)) - : f.assign(converted_value); + bool is_predecessor_closer = binary32 ? 
f.assign(static_cast(value)) + : f.assign(converted_value); if (is_predecessor_closer) dragon_flags |= dragon::predecessor_closer; if (fixed) dragon_flags |= dragon::fixed; // Limit precision to the maximum possible number of significant digits in @@ -3514,7 +3448,7 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, if (precision > max_double_digits) precision = max_double_digits; format_dragon(f, dragon_flags, precision, buf, exp); } - if (!fixed && !specs.showpoint) { + if (!fixed && !specs.alt()) { // Remove trailing zeros. auto num_digits = buf.size(); while (num_digits > 0 && buf[num_digits - 1] == '0') { @@ -3529,51 +3463,55 @@ FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs, template FMT_CONSTEXPR20 auto write_float(OutputIt out, T value, format_specs specs, locale_ref loc) -> OutputIt { - sign_t sign = specs.sign; - if (detail::signbit(value)) { // value < 0 is false for NaN so use signbit. - sign = sign::minus; - value = -value; - } else if (sign == sign::minus) { - sign = sign::none; - } + // Use signbit because value < 0 is false for NaN. + sign s = detail::signbit(value) ? sign::minus : specs.sign(); if (!detail::isfinite(value)) - return write_nonfinite(out, detail::isnan(value), specs, sign); + return write_nonfinite(out, detail::isnan(value), specs, s); - if (specs.align == align::numeric && sign) { - auto it = reserve(out, 1); - *it++ = detail::sign(sign); - out = base_iterator(out, it); - sign = sign::none; + if (specs.align() == align::numeric && s != sign::none) { + *out++ = detail::getsign(s); + s = sign::none; if (specs.width != 0) --specs.width; } + int precision = specs.precision; + if (precision < 0) { + if (specs.type() != presentation_type::none) { + precision = 6; + } else if (is_fast_float::value && !is_constant_evaluated()) { + // Use Dragonbox for the shortest format. 
+ using floaty = conditional_t= sizeof(double), double, float>; + auto dec = dragonbox::to_decimal(static_cast(value)); + return write_float(out, dec, specs, s, loc); + } + } + memory_buffer buffer; - if (specs.type == presentation_type::hexfloat) { - if (sign) buffer.push_back(detail::sign(sign)); + if (specs.type() == presentation_type::hexfloat) { + if (s != sign::none) buffer.push_back(detail::getsign(s)); format_hexfloat(convert_float(value), specs, buffer); return write_bytes(out, {buffer.data(), buffer.size()}, specs); } - int precision = specs.precision >= 0 || specs.type == presentation_type::none - ? specs.precision - : 6; - if (specs.type == presentation_type::exp) { + if (specs.type() == presentation_type::exp) { if (precision == max_value()) report_error("number is too big"); else ++precision; - } else if (specs.type != presentation_type::fixed && precision == 0) { + if (specs.precision != 0) specs.set_alt(); + } else if (specs.type() == presentation_type::fixed) { + if (specs.precision != 0) specs.set_alt(); + } else if (precision == 0) { precision = 1; } - float_specs fspecs = parse_float_type_spec(specs); - fspecs.sign = sign; - if (const_check(std::is_same())) fspecs.binary32 = true; - int exp = format_float(convert_float(value), precision, fspecs, buffer); - fspecs.precision = precision; + int exp = format_float(convert_float(value), precision, specs, + std::is_same(), buffer); + + specs.precision = precision; auto f = big_decimal_fp{buffer.data(), static_cast(buffer.size()), exp}; - return write_float(out, f, specs, fspecs, loc); + return write_float(out, f, specs, s, loc); } template OutputIt { if (const_check(!is_supported_floating_point(value))) return out; - return specs.localized && write_loc(out, value, specs, loc) + return specs.localized() && write_loc(out, value, specs, loc) ? 
out : write_float(out, value, specs, loc); } @@ -3592,23 +3530,17 @@ FMT_CONSTEXPR20 auto write(OutputIt out, T value) -> OutputIt { if (is_constant_evaluated()) return write(out, value, format_specs()); if (const_check(!is_supported_floating_point(value))) return out; - auto sign = sign_t::none; - if (detail::signbit(value)) { - sign = sign::minus; - value = -value; - } + auto s = detail::signbit(value) ? sign::minus : sign::none; constexpr auto specs = format_specs(); - using floaty = conditional_t::value, double, T>; + using floaty = conditional_t= sizeof(double), double, float>; using floaty_uint = typename dragonbox::float_info::carrier_uint; floaty_uint mask = exponent_mask(); if ((bit_cast(value) & mask) == mask) - return write_nonfinite(out, std::isnan(value), specs, sign); + return write_nonfinite(out, std::isnan(value), specs, s); - auto fspecs = float_specs(); - fspecs.sign = sign; auto dec = dragonbox::to_decimal(static_cast(value)); - return write_float(out, dec, specs, fspecs, {}); + return write_float(out, dec, specs, s, {}); } template ::value)> FMT_CONSTEXPR auto write(OutputIt out, T value, const format_specs& specs = {}, locale_ref = {}) -> OutputIt { - return specs.type != presentation_type::none && - specs.type != presentation_type::string + return specs.type() != presentation_type::none && + specs.type() != presentation_type::string ? write(out, value ? 1 : 0, specs, {}) : write_bytes(out, value ? "true" : "false", specs); } @@ -3709,21 +3641,19 @@ FMT_CONSTEXPR auto write(OutputIt out, const T& value) // An argument visitor that formats the argument and writes it via the output // iterator. It's a class and not a generic lambda for compatibility with C++11. 
template struct default_arg_formatter { - using iterator = basic_appender; using context = buffered_context; - iterator out; - basic_format_args args; - locale_ref loc; + basic_appender out; - template auto operator()(T value) -> iterator { - return write(out, value); - } - auto operator()(typename basic_format_arg::handle h) -> iterator { - basic_format_parse_context parse_ctx({}); - context format_ctx(out, args, loc); + void operator()(monostate) { report_error("argument not found"); } + + template void operator()(T value) { write(out, value); } + + void operator()(typename basic_format_arg::handle h) { + // Use a null locale since the default format must be unlocalized. + auto parse_ctx = basic_format_parse_context({}); + auto format_ctx = context(out, {}, {}); h.format(parse_ctx, format_ctx); - return format_ctx.out(); } }; @@ -3746,62 +3676,45 @@ template struct arg_formatter { } }; -struct width_checker { +struct dynamic_spec_getter { template ::value)> FMT_CONSTEXPR auto operator()(T value) -> unsigned long long { - if (is_negative(value)) report_error("negative width"); - return static_cast(value); + return is_negative(value) ? 
~0ull : static_cast(value); } template ::value)> FMT_CONSTEXPR auto operator()(T) -> unsigned long long { - report_error("width is not integer"); + report_error("width/precision is not integer"); return 0; } }; -struct precision_checker { - template ::value)> - FMT_CONSTEXPR auto operator()(T value) -> unsigned long long { - if (is_negative(value)) report_error("negative precision"); - return static_cast(value); - } - - template ::value)> - FMT_CONSTEXPR auto operator()(T) -> unsigned long long { - report_error("precision is not integer"); - return 0; - } -}; - -template -FMT_CONSTEXPR auto get_dynamic_spec(FormatArg arg) -> int { - unsigned long long value = arg.visit(Handler()); - if (value > to_unsigned(max_value())) report_error("number is too big"); - return static_cast(value); -} - template -FMT_CONSTEXPR auto get_arg(Context& ctx, ID id) -> decltype(ctx.arg(id)) { +FMT_CONSTEXPR auto get_arg(Context& ctx, ID id) -> basic_format_arg { auto arg = ctx.arg(id); if (!arg) report_error("argument not found"); return arg; } -template -FMT_CONSTEXPR void handle_dynamic_spec(int& value, - arg_ref ref, - Context& ctx) { - switch (ref.kind) { - case arg_id_kind::none: - break; - case arg_id_kind::index: - value = detail::get_dynamic_spec(get_arg(ctx, ref.val.index)); - break; - case arg_id_kind::name: - value = detail::get_dynamic_spec(get_arg(ctx, ref.val.name)); - break; - } +template +FMT_CONSTEXPR int get_dynamic_spec( + arg_id_kind kind, const arg_ref& ref, + Context& ctx) { + FMT_ASSERT(kind != arg_id_kind::none, ""); + auto arg = + kind == arg_id_kind::index ? 
ctx.arg(ref.index) : ctx.arg(ref.name); + if (!arg) report_error("argument not found"); + unsigned long long value = arg.visit(dynamic_spec_getter()); + if (value > to_unsigned(max_value())) + report_error("width/precision is out of range"); + return static_cast(value); +} + +template +FMT_CONSTEXPR void handle_dynamic_spec( + arg_id_kind kind, int& value, + const arg_ref& ref, Context& ctx) { + if (kind != arg_id_kind::none) value = get_dynamic_spec(kind, ref, ctx); } #if FMT_USE_USER_DEFINED_LITERALS @@ -3918,7 +3831,7 @@ class format_int { template FMT_CONSTEXPR20 auto format_unsigned(UInt value) -> char* { auto n = static_cast>(value); - return detail::format_decimal(buffer_, n, buffer_size - 1).begin; + return detail::do_format_decimal(buffer_, n, buffer_size - 1); } template @@ -4056,10 +3969,10 @@ template <> struct formatter { template auto format(bytes b, FormatContext& ctx) const -> decltype(ctx.out()) { auto specs = specs_; - detail::handle_dynamic_spec(specs.width, - specs.width_ref, ctx); - detail::handle_dynamic_spec( - specs.precision, specs.precision_ref, ctx); + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, + specs.width_ref, ctx); + detail::handle_dynamic_spec(specs.dynamic_precision(), specs.precision, + specs.precision_ref, ctx); return detail::write_bytes(ctx.out(), b.data_, specs); } }; @@ -4097,11 +4010,11 @@ template struct formatter> : formatter { auto format(group_digits_view t, FormatContext& ctx) const -> decltype(ctx.out()) { auto specs = specs_; - detail::handle_dynamic_spec(specs.width, - specs.width_ref, ctx); - detail::handle_dynamic_spec( - specs.precision, specs.precision_ref, ctx); - auto arg = detail::make_write_int_arg(t.value, specs.sign); + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, + specs.width_ref, ctx); + detail::handle_dynamic_spec(specs.dynamic_precision(), specs.precision, + specs.precision_ref, ctx); + auto arg = detail::make_write_int_arg(t.value, specs.sign()); return 
detail::write_int( ctx.out(), static_cast>(arg.abs_value), arg.prefix, specs, detail::digit_grouping("\3", ",")); @@ -4128,22 +4041,26 @@ struct formatter, Char> { template struct nested_formatter { private: + basic_specs specs_; int width_; - detail::fill_t fill_; - align_t align_ : 4; formatter formatter_; public: - constexpr nested_formatter() : width_(0), align_(align_t::none) {} + constexpr nested_formatter() : width_(0) {} FMT_CONSTEXPR auto parse(basic_format_parse_context& ctx) -> decltype(ctx.begin()) { - auto specs = detail::dynamic_format_specs(); - auto it = parse_format_specs(ctx.begin(), ctx.end(), specs, ctx, - detail::type::none_type); - width_ = specs.width; - fill_ = specs.fill; - align_ = specs.align; + auto it = ctx.begin(), end = ctx.end(); + if (it == end) return it; + auto specs = format_specs(); + it = detail::parse_align(it, end, specs); + specs_ = specs; + Char c = *it; + auto width_ref = detail::arg_ref(); + if ((c >= '0' && c <= '9') || c == '{') { + it = detail::parse_width(it, end, specs, width_ref, ctx); + width_ = specs.width; + } ctx.advance_to(it); return formatter_.parse(ctx); } @@ -4155,8 +4072,9 @@ template struct nested_formatter { write(basic_appender(buf)); auto specs = format_specs(); specs.width = width_; - specs.fill = fill_; - specs.align = align_; + specs.set_fill( + basic_string_view(specs_.fill(), specs_.fill_size())); + specs.set_align(specs_.align()); return detail::write( ctx.out(), basic_string_view(buf.data(), buf.size()), specs); } @@ -4209,73 +4127,64 @@ FMT_END_EXPORT namespace detail { -template -void vformat_to(buffer& buf, basic_string_view fmt, - typename vformat_args::type args, locale_ref loc) { - auto out = basic_appender(buf); - if (fmt.size() == 2 && equal2(fmt.data(), "{}")) { - auto arg = args.get(0); - if (!arg) report_error("argument not found"); - arg.visit(default_arg_formatter{out, args, loc}); - return; +template struct format_handler { + basic_format_parse_context parse_context; + 
buffered_context context; + + void on_text(const Char* begin, const Char* end) { + copy_noinline(begin, end, context.out()); } - struct format_handler { - basic_format_parse_context parse_context; - buffered_context context; + FMT_CONSTEXPR auto on_arg_id() -> int { return parse_context.next_arg_id(); } + FMT_CONSTEXPR auto on_arg_id(int id) -> int { + parse_context.check_arg_id(id); + return id; + } + FMT_CONSTEXPR auto on_arg_id(basic_string_view id) -> int { + parse_context.check_arg_id(id); + int arg_id = context.arg_id(id); + if (arg_id < 0) report_error("argument not found"); + return arg_id; + } - format_handler(basic_appender p_out, basic_string_view str, - basic_format_args> p_args, - locale_ref p_loc) - : parse_context(str), context(p_out, p_args, p_loc) {} + FMT_INLINE void on_replacement_field(int id, const Char*) { + context.arg(id).visit(default_arg_formatter{context.out()}); + } - void on_text(const Char* begin, const Char* end) { - auto text = basic_string_view(begin, to_unsigned(end - begin)); - context.advance_to(write(context.out(), text)); - } + auto on_format_specs(int id, const Char* begin, const Char* end) + -> const Char* { + auto arg = get_arg(context, id); + // Not using a visitor for custom types gives better codegen. 
+ if (arg.format_custom(begin, parse_context, context)) + return parse_context.begin(); - FMT_CONSTEXPR auto on_arg_id() -> int { - return parse_context.next_arg_id(); - } - FMT_CONSTEXPR auto on_arg_id(int id) -> int { - parse_context.check_arg_id(id); - return id; - } - FMT_CONSTEXPR auto on_arg_id(basic_string_view id) -> int { - parse_context.check_arg_id(id); - int arg_id = context.arg_id(id); - if (arg_id < 0) report_error("argument not found"); - return arg_id; + auto specs = dynamic_format_specs(); + begin = parse_format_specs(begin, end, specs, parse_context, arg.type()); + if (specs.dynamic()) { + handle_dynamic_spec(specs.dynamic_width(), specs.width, specs.width_ref, + context); + handle_dynamic_spec(specs.dynamic_precision(), specs.precision, + specs.precision_ref, context); } - FMT_INLINE void on_replacement_field(int id, const Char*) { - auto arg = get_arg(context, id); - context.advance_to(arg.visit(default_arg_formatter{ - context.out(), context.args(), context.locale()})); - } + if (begin == end || *begin != '}') + report_error("missing '}' in format string"); + arg.visit(arg_formatter{context.out(), specs, context.locale()}); + return begin; + } - auto on_format_specs(int id, const Char* begin, const Char* end) - -> const Char* { - auto arg = get_arg(context, id); - // Not using a visitor for custom types gives better codegen. 
- if (arg.format_custom(begin, parse_context, context)) - return parse_context.begin(); - auto specs = detail::dynamic_format_specs(); - begin = parse_format_specs(begin, end, specs, parse_context, arg.type()); - detail::handle_dynamic_spec( - specs.width, specs.width_ref, context); - detail::handle_dynamic_spec( - specs.precision, specs.precision_ref, context); - if (begin == end || *begin != '}') - report_error("missing '}' in format string"); - context.advance_to(arg.visit( - arg_formatter{context.out(), specs, context.locale()})); - return begin; - } + FMT_NORETURN void on_error(const char* message) { report_error(message); } +}; - FMT_NORETURN void on_error(const char* message) { report_error(message); } - }; - detail::parse_format_string(fmt, format_handler(out, fmt, args, loc)); +template +void vformat_to(buffer& buf, basic_string_view fmt, + typename vformat_args::type args, locale_ref loc) { + auto out = basic_appender(buf); + if (fmt.size() == 2 && equal2(fmt.data(), "{}")) + return args.get(0).visit(default_arg_formatter{out}); + parse_format_string( + fmt, format_handler{basic_format_parse_context(fmt), + {out, args, loc}}); } FMT_BEGIN_EXPORT @@ -4298,14 +4207,13 @@ template template FMT_CONSTEXPR FMT_INLINE auto native_formatter::format( const T& val, FormatContext& ctx) const -> decltype(ctx.out()) { - if (specs_.width_ref.kind == arg_id_kind::none && - specs_.precision_ref.kind == arg_id_kind::none) { + if (!specs_.dynamic()) return write(ctx.out(), val, specs_, ctx.locale()); - } - auto specs = specs_; - handle_dynamic_spec(specs.width, specs.width_ref, ctx); - handle_dynamic_spec(specs.precision, specs.precision_ref, - ctx); + auto specs = format_specs(specs_); + handle_dynamic_spec(specs.dynamic_width(), specs.width, specs_.width_ref, + ctx); + handle_dynamic_spec(specs.dynamic_precision(), specs.precision, + specs_.precision_ref, ctx); return write(ctx.out(), val, specs, ctx.locale()); } diff --git a/third_party/fmt/include/fmt/os.h 
b/third_party/fmt/include/fmt/os.h index 5c85ea08..974c5c20 100644 --- a/third_party/fmt/include/fmt/os.h +++ b/third_party/fmt/include/fmt/os.h @@ -383,7 +383,7 @@ class file_buffer final : public buffer { } // namespace detail -constexpr auto buffer_size = detail::buffer_size(); +FMT_INLINE_VARIABLE constexpr auto buffer_size = detail::buffer_size(); /// A fast output stream for writing from a single thread. Writing from /// multiple threads without external synchronization may result in a data race. diff --git a/third_party/fmt/include/fmt/printf.h b/third_party/fmt/include/fmt/printf.h index 072cc6b3..77f7e9b1 100644 --- a/third_party/fmt/include/fmt/printf.h +++ b/third_party/fmt/include/fmt/printf.h @@ -200,7 +200,7 @@ class printf_width_handler { auto operator()(T value) -> unsigned { auto width = static_cast>(value); if (detail::is_negative(value)) { - specs_.align = align::left; + specs_.set_align(align::left); width = 0 - width; } unsigned int_max = to_unsigned(max_value()); @@ -234,7 +234,7 @@ class printf_arg_formatter : public arg_formatter { void write_null_pointer(bool is_string = false) { auto s = this->specs; - s.type = presentation_type::none; + s.set_type(presentation_type::none); write_bytes(this->out, is_string ? "(null)" : "(nil)", s); } @@ -254,16 +254,17 @@ class printf_arg_formatter : public arg_formatter { return; } format_specs s = this->specs; - if (s.type != presentation_type::none && s.type != presentation_type::chr) { + if (s.type() != presentation_type::none && + s.type() != presentation_type::chr) { return (*this)(static_cast(value)); } - s.sign = sign::none; - s.alt = false; - s.fill = ' '; // Ignore '0' flag for char types. + s.set_sign(sign::none); + s.clear_alt(); + s.set_fill(' '); // Ignore '0' flag for char types. 
// align::numeric needs to be overwritten here since the '0' flag is // ignored for non-numeric types - if (s.align == align::none || s.align == align::numeric) - s.align = align::right; + if (s.align() == align::none || s.align() == align::numeric) + s.set_align(align::right); write(this->out, static_cast(value), s); } @@ -276,14 +277,14 @@ class printf_arg_formatter : public arg_formatter { if (value) base::operator()(value); else - write_null_pointer(this->specs.type != presentation_type::pointer); + write_null_pointer(this->specs.type() != presentation_type::pointer); } void operator()(const wchar_t* value) { if (value) base::operator()(value); else - write_null_pointer(this->specs.type != presentation_type::pointer); + write_null_pointer(this->specs.type() != presentation_type::pointer); } void operator()(basic_string_view value) { base::operator()(value); } @@ -306,19 +307,19 @@ void parse_flags(format_specs& specs, const Char*& it, const Char* end) { for (; it != end; ++it) { switch (*it) { case '-': - specs.align = align::left; + specs.set_align(align::left); break; case '+': - specs.sign = sign::plus; + specs.set_sign(sign::plus); break; case '0': - specs.fill = '0'; + specs.set_fill('0'); break; case ' ': - if (specs.sign != sign::plus) specs.sign = sign::space; + if (specs.sign() != sign::plus) specs.set_sign(sign::space); break; case '#': - specs.alt = true; + specs.set_alt(); break; default: return; @@ -339,7 +340,7 @@ auto parse_header(const Char*& it, const Char* end, format_specs& specs, ++it; arg_index = value != -1 ? value : max_value(); } else { - if (c == '0') specs.fill = '0'; + if (c == '0') specs.set_fill('0'); if (value != 0) { // Nonzero value means that we parsed width and don't need to // parse it or flags again, so return now. 
@@ -444,7 +445,7 @@ void vprintf(buffer& buf, basic_string_view format, write(out, basic_string_view(start, to_unsigned(it - 1 - start))); auto specs = format_specs(); - specs.align = align::right; + specs.set_align(align::right); // Parse argument index, flags and width. int arg_index = parse_header(it, end, specs, get_arg); @@ -470,7 +471,7 @@ void vprintf(buffer& buf, basic_string_view format, // specified, the '0' flag is ignored if (specs.precision >= 0 && arg.is_integral()) { // Ignore '0' for non-numeric types or if '-' present. - specs.fill = ' '; + specs.set_fill(' '); } if (specs.precision >= 0 && arg.type() == type::cstring_type) { auto str = arg.visit(get_cstring()); @@ -480,13 +481,14 @@ void vprintf(buffer& buf, basic_string_view format, str, to_unsigned(nul != str_end ? nul - str : specs.precision)); arg = make_arg>(sv); } - if (specs.alt && arg.visit(is_zero_int())) specs.alt = false; - if (specs.fill.template get() == '0') { - if (arg.is_arithmetic() && specs.align != align::left) - specs.align = align::numeric; - else - specs.fill = ' '; // Ignore '0' flag for non-numeric types or if '-' - // flag is also present. + if (specs.alt() && arg.visit(is_zero_int())) specs.clear_alt(); + if (specs.fill_unit() == '0') { + if (arg.is_arithmetic() && specs.align() != align::left) { + specs.set_align(align::numeric); + } else { + // Ignore '0' flag for non-numeric types or if '-' flag is also present. + specs.set_fill(' '); + } } // Parse length and convert the argument to the required type. 
@@ -545,10 +547,10 @@ void vprintf(buffer& buf, basic_string_view format, } } bool upper = false; - specs.type = parse_printf_presentation_type(type, arg.type(), upper); - if (specs.type == presentation_type::none) + specs.set_type(parse_printf_presentation_type(type, arg.type(), upper)); + if (specs.type() == presentation_type::none) report_error("invalid format specifier"); - specs.upper = upper; + if (upper) specs.set_upper(); start = it; diff --git a/third_party/fmt/include/fmt/ranges.h b/third_party/fmt/include/fmt/ranges.h index 0d3dfbd8..47d9f4aa 100644 --- a/third_party/fmt/include/fmt/ranges.h +++ b/third_party/fmt/include/fmt/ranges.h @@ -330,7 +330,14 @@ struct formatter FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) { auto it = ctx.begin(); - if (it != ctx.end() && *it != '}') report_error("invalid format specifier"); + auto end = ctx.end(); + if (it != end && detail::to_ascii(*it) == 'n') { + ++it; + set_brackets({}, {}); + set_separator({}); + } + if (it != end && *it != '}') report_error("invalid format specifier"); + ctx.advance_to(it); detail::for_each(formatters_, detail::parse_empty_specs{ctx}); return it; } @@ -415,7 +422,7 @@ struct range_formatter< auto buf = basic_memory_buffer(); for (; it != end; ++it) buf.push_back(*it); auto specs = format_specs(); - specs.type = presentation_type::debug; + specs.set_type(presentation_type::debug); return detail::write( out, basic_string_view(buf.data(), buf.size()), specs); } diff --git a/third_party/fmt/include/fmt/std.h b/third_party/fmt/include/fmt/std.h index fb43940b..1e69b8b2 100644 --- a/third_party/fmt/include/fmt/std.h +++ b/third_party/fmt/include/fmt/std.h @@ -129,7 +129,9 @@ template struct formatter { it = detail::parse_align(it, end, specs_); if (it == end) return it; - it = detail::parse_dynamic_spec(it, end, specs_.width, width_ref_, ctx); + Char c = *it; + if ((c >= '0' && c <= '9') || c == '{') + it = detail::parse_width(it, end, specs_, width_ref_, ctx); if (it != 
end && *it == '?') { debug_ = true; ++it; @@ -145,8 +147,8 @@ template struct formatter { !path_type_ ? p.native() : p.generic_string(); - detail::handle_dynamic_spec(specs.width, width_ref_, - ctx); + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, width_ref_, + ctx); if (!debug_) { auto s = detail::get_path_string(p, path_string); return detail::write(ctx.out(), basic_string_view(s), specs); @@ -643,7 +645,7 @@ template struct formatter, Char> { if (c.real() != 0) { *out++ = Char('('); out = detail::write(out, c.real(), specs, ctx.locale()); - specs.sign = sign::plus; + specs.set_sign(sign::plus); out = detail::write(out, c.imag(), specs, ctx.locale()); if (!detail::isfinite(c.imag())) *out++ = Char(' '); *out++ = Char('i'); @@ -668,12 +670,11 @@ template struct formatter, Char> { auto format(const std::complex& c, FormatContext& ctx) const -> decltype(ctx.out()) { auto specs = specs_; - if (specs.width_ref.kind != detail::arg_id_kind::none || - specs.precision_ref.kind != detail::arg_id_kind::none) { - detail::handle_dynamic_spec(specs.width, - specs.width_ref, ctx); - detail::handle_dynamic_spec( - specs.precision, specs.precision_ref, ctx); + if (specs.dynamic()) { + detail::handle_dynamic_spec(specs.dynamic_width(), specs.width, + specs.width_ref, ctx); + detail::handle_dynamic_spec(specs.dynamic_precision(), specs.precision, + specs.precision_ref, ctx); } if (specs.width == 0) return do_format(c, specs, ctx, ctx.out()); @@ -681,12 +682,14 @@ template struct formatter, Char> { auto outer_specs = format_specs(); outer_specs.width = specs.width; - outer_specs.fill = specs.fill; - outer_specs.align = specs.align; + auto fill = specs.template fill(); + if (fill) + outer_specs.set_fill(basic_string_view(fill, specs.fill_size())); + outer_specs.set_align(specs.align()); specs.width = 0; - specs.fill = {}; - specs.align = align::none; + specs.set_fill({}); + specs.set_align(align::none); do_format(c, specs, ctx, basic_appender(buf)); return 
detail::write(ctx.out(), diff --git a/third_party/fmt/src/fmt.cc b/third_party/fmt/src/fmt.cc index fb514ad7..0a838bb1 100644 --- a/third_party/fmt/src/fmt.cc +++ b/third_party/fmt/src/fmt.cc @@ -127,9 +127,17 @@ extern "C++" { module :private; #endif +#ifdef FMT_ATTACH_TO_GLOBAL_MODULE +extern "C++" { +#endif + #if FMT_HAS_INCLUDE("format.cc") # include "format.cc" #endif #if FMT_OS && FMT_HAS_INCLUDE("os.cc") # include "os.cc" #endif + +#ifdef FMT_ATTACH_TO_GLOBAL_MODULE +} +#endif diff --git a/third_party/fmt/src/format.cc b/third_party/fmt/src/format.cc index 391d3a24..70d12d6b 100644 --- a/third_party/fmt/src/format.cc +++ b/third_party/fmt/src/format.cc @@ -26,6 +26,7 @@ template FMT_API auto thousands_sep_impl(locale_ref) -> thousands_sep_result; template FMT_API auto decimal_point_impl(locale_ref) -> char; +// DEPRECATED! template FMT_API void buffer::append(const char*, const char*); template FMT_API void vformat_to(buffer&, string_view, diff --git a/third_party/fmt/support/check-commits b/third_party/fmt/support/check-commits index 81fde9fe..11472d41 100755 --- a/third_party/fmt/support/check-commits +++ b/third_party/fmt/support/check-commits @@ -28,10 +28,10 @@ with tempfile.TemporaryDirectory() as work_dir: for commit in commits: check_call(['git', '-c', 'advice.detachedHead=false', 'checkout', commit], cwd=repo_dir) - return_code = run( + returncode = run( ['c++', '-std=c++11', '-O3', '-DNDEBUG', '-I', 'include', - 'src/format.cc', os.path.join(cwd, source)], cwd=repo_dir).return_code - if return_code != 0: + 'src/format.cc', os.path.join(cwd, source)], cwd=repo_dir).returncode + if returncode != 0: continue times = [] for i in range(5): diff --git a/third_party/fmt/support/mkdocs b/third_party/fmt/support/mkdocs index 6901918e..e554c1fd 100755 --- a/third_party/fmt/support/mkdocs +++ b/third_party/fmt/support/mkdocs @@ -15,6 +15,27 @@ path = env.get('PYTHONPATH') env['PYTHONPATH'] = \ (path + ':' if path else '') + os.path.join(support_dir, 'python') 
+redirect_page = \ +''' + + + + Redirecting + + + + + Redirecting to api... + + +''' + config_path = os.path.join(support_dir, 'mkdocs.yml') args = sys.argv[1:] if len(args) > 0: @@ -23,7 +44,7 @@ if len(args) > 0: git_url = 'https://github.com/' if 'CI' in os.environ else 'git@github.com:' site_repo = git_url + 'fmtlib/fmt.dev.git' - site_dir = os. path.join(build_dir, 'fmt.dev') + site_dir = os.path.join(build_dir, 'fmt.dev') try: shutil.rmtree(site_dir) except OSError as e: @@ -37,8 +58,19 @@ if len(args) > 0: config_build_path = os.path.join(build_dir, 'mkdocs.yml') shutil.copyfile(config_path, config_build_path) - sys.exit(call(['mike'] + args + ['--config-file', config_build_path, - '--branch', 'master'], cwd=site_dir, env=env)) + version = args[1] + ret = call(['mike'] + args + ['--config-file', config_build_path, + '--branch', 'master'], cwd=site_dir, env=env) + if ret != 0 or version == 'dev': + sys.exit(ret) + redirect_page_path = os.path.join(site_dir, version, 'api.html') + with open(redirect_page_path, "w") as file: + file.write(redirect_page) + ret = call(['git', 'add', redirect_page_path], cwd=site_dir) + if ret != 0: + sys.exit(ret) + ret = call(['git', 'commit', '--amend', '--no-edit'], cwd=site_dir) + sys.exit(ret) elif not command.startswith('-'): args += ['-f', config_path] sys.exit(call(['mkdocs'] + args, env=env)) diff --git a/third_party/fmt/support/python/mkdocstrings_handlers/cxx/__init__.py b/third_party/fmt/support/python/mkdocstrings_handlers/cxx/__init__.py index 4ade52ab..21dce201 100644 --- a/third_party/fmt/support/python/mkdocstrings_handlers/cxx/__init__.py +++ b/third_party/fmt/support/python/mkdocstrings_handlers/cxx/__init__.py @@ -1,317 +1,338 @@ # A basic mkdocstrings handler for {fmt}. 
# Copyright (c) 2012 - present, Victor Zverovich +# https://github.com/fmtlib/fmt/blob/master/LICENSE import os +import xml.etree.ElementTree as ElementTree from pathlib import Path +from subprocess import PIPE, STDOUT, CalledProcessError, Popen from typing import Any, List, Mapping, Optional -from subprocess import CalledProcessError, PIPE, Popen, STDOUT -import xml.etree.ElementTree as et from mkdocstrings.handlers.base import BaseHandler + class Definition: - '''A definition extracted by Doxygen.''' - def __init__(self, name: str, kind: Optional[str] = None, - node: Optional[et.Element] = None, - is_member: bool = False): - self.name = name - self.kind = kind if kind is not None else node.get('kind') - self.id = name if not is_member else None - self.params = None - self.members = None + """A definition extracted by Doxygen.""" + + def __init__(self, name: str, kind: Optional[str] = None, + node: Optional[ElementTree.Element] = None, + is_member: bool = False): + self.name = name + self.kind = kind if kind is not None else node.get('kind') + self.desc = None + self.id = name if not is_member else None + self.members = None + self.params = None + self.template_params = None + self.trailing_return_type = None + self.type = None + # A map from Doxygen to HTML tags. tag_map = { - 'bold': 'b', - 'emphasis': 'em', - 'computeroutput': 'code', - 'para': 'p', - 'programlisting': 'pre', - 'verbatim': 'pre' + 'bold': 'b', + 'emphasis': 'em', + 'computeroutput': 'code', + 'para': 'p', + 'programlisting': 'pre', + 'verbatim': 'pre' } # A map from Doxygen tags to text. 
tag_text_map = { - 'codeline': '', - 'highlight': '', - 'sp': ' ' + 'codeline': '', + 'highlight': '', + 'sp': ' ' } + def escape_html(s: str) -> str: - return s.replace("<", "<") - -def doxyxml2html(nodes: List[et.Element]): - out = '' - for n in nodes: - tag = tag_map.get(n.tag) - if not tag: - out += tag_text_map[n.tag] - out += '<' + tag + '>' if tag else '' - out += '' if tag == 'pre' else '' - if n.text: - out += escape_html(n.text) - out += doxyxml2html(n) - out += '' if tag == 'pre' else '' - out += '' if tag else '' - if n.tail: - out += n.tail - return out - -def convert_template_params(node: et.Element) -> Optional[List[Definition]]: - templateparamlist = node.find('templateparamlist') - if templateparamlist is None: - return None - params = [] - for param_node in templateparamlist.findall('param'): - name = param_node.find('declname') - param = Definition(name.text if name is not None else '', 'param') - param.type = param_node.find('type').text - params.append(param) - return params - -def get_description(node: et.Element) -> List[et.Element]: - return node.findall('briefdescription/para') + \ - node.findall('detaileddescription/para') - -def normalize_type(type: str) -> str: - type = type.replace('< ', '<').replace(' >', '>') - return type.replace(' &', '&').replace(' *', '*') - -def convert_type(type: et.Element) -> str: - if type is None: - return None - result = type.text if type.text else '' - for ref in type: - result += ref.text - if ref.tail: - result += ref.tail - result += type.tail.strip() - return normalize_type(result) - -def convert_params(func: et.Element) -> Definition: - params = [] - for p in func.findall('param'): - d = Definition(p.find('declname').text, 'param') - d.type = convert_type(p.find('type')) - params.append(d) - return params - -def convert_return_type(d: Definition, node: et.Element) -> None: - d.trailing_return_type = None - if d.type == 'auto' or d.type == 'constexpr auto': - parts = 
node.find('argsstring').text.split(' -> ') - if len(parts) > 1: - d.trailing_return_type = normalize_type(parts[1]) + return s.replace("<", "<") + + +def doxyxml2html(nodes: List[ElementTree.Element]): + out = '' + for n in nodes: + tag = tag_map.get(n.tag) + if not tag: + out += tag_text_map[n.tag] + out += '<' + tag + '>' if tag else '' + out += '' if tag == 'pre' else '' + if n.text: + out += escape_html(n.text) + out += doxyxml2html(list(n)) + out += '' if tag == 'pre' else '' + out += '' if tag else '' + if n.tail: + out += n.tail + return out + + +def convert_template_params(node: ElementTree.Element) -> Optional[List[Definition]]: + template_param_list = node.find('templateparamlist') + if template_param_list is None: + return None + params = [] + for param_node in template_param_list.findall('param'): + name = param_node.find('declname') + param = Definition(name.text if name is not None else '', 'param') + param.type = param_node.find('type').text + params.append(param) + return params + + +def get_description(node: ElementTree.Element) -> List[ElementTree.Element]: + return node.findall('briefdescription/para') + \ + node.findall('detaileddescription/para') + + +def normalize_type(type_: str) -> str: + type_ = type_.replace('< ', '<').replace(' >', '>') + return type_.replace(' &', '&').replace(' *', '*') + + +def convert_type(type_: ElementTree.Element) -> Optional[str]: + if type_ is None: + return None + result = type_.text if type_.text else '' + for ref in type_: + result += ref.text + if ref.tail: + result += ref.tail + result += type_.tail.strip() + return normalize_type(result) + + +def convert_params(func: ElementTree.Element) -> List[Definition]: + params = [] + for p in func.findall('param'): + d = Definition(p.find('declname').text, 'param') + d.type = convert_type(p.find('type')) + params.append(d) + return params + + +def convert_return_type(d: Definition, node: ElementTree.Element) -> None: + d.trailing_return_type = None + if d.type == 
'auto' or d.type == 'constexpr auto': + parts = node.find('argsstring').text.split(' -> ') + if len(parts) > 1: + d.trailing_return_type = normalize_type(parts[1]) + def render_param(param: Definition) -> str: - return param.type + (f' {param.name}' if len(param.name) > 0 else '') - -def render_decl(d: Definition) -> None: - text = '' - if d.id is not None: - text += f'\n' - text += '
'
-
-  text += '
' - if d.template_params is not None: - text += 'template <' - text += ', '.join([render_param(p) for p in d.template_params]) - text += '>\n' - text += '
' - - text += '
' - end = ';' - if d.kind == 'function' or d.kind == 'variable': - text += d.type + ' ' if len(d.type) > 0 else '' - elif d.kind == 'typedef': - text += 'using ' - elif d.kind == 'define': - end = '' - else: - text += d.kind + ' ' - text += d.name - - if d.params is not None: - params = ', '.join([ - (p.type + ' ' if p.type else '') + p.name for p in d.params]) - text += '(' + escape_html(params) + ')' - if d.trailing_return_type: - text += ' -⁠> ' + escape_html(d.trailing_return_type) - elif d.kind == 'typedef': - text += ' = ' + escape_html(d.type) - - text += end - text += '
' - text += '
\n' - if d.id is not None: - text += f'
\n' - return text + return param.type + (f' {param.name}' if len(param.name) > 0 else '') -class CxxHandler(BaseHandler): - def __init__(self, **kwargs: Any) -> None: - super().__init__(handler='cxx', **kwargs) - - headers = [ - 'args.h', 'base.h', 'chrono.h', 'color.h', 'compile.h', 'format.h', - 'os.h', 'ostream.h', 'printf.h', 'ranges.h', 'std.h', 'xchar.h' - ] - - # Run doxygen. - cmd = ['doxygen', '-'] - support_dir = Path(__file__).parents[3] - top_dir = os.path.dirname(support_dir) - include_dir = os.path.join(top_dir, 'include', 'fmt') - self._ns2doxyxml = {} - build_dir = os.path.join(top_dir, 'build') - os.makedirs(build_dir, exist_ok=True) - self._doxyxml_dir = os.path.join(build_dir, 'doxyxml') - p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT) - _, _ = p.communicate(input=r''' - PROJECT_NAME = fmt - GENERATE_XML = YES - GENERATE_LATEX = NO - GENERATE_HTML = NO - INPUT = {0} - XML_OUTPUT = {1} - QUIET = YES - AUTOLINK_SUPPORT = NO - MACRO_EXPANSION = YES - PREDEFINED = _WIN32=1 \ - __linux__=1 \ - FMT_ENABLE_IF(...)= \ - FMT_USE_USER_DEFINED_LITERALS=1 \ - FMT_USE_ALIAS_TEMPLATES=1 \ - FMT_USE_NONTYPE_TEMPLATE_ARGS=1 \ - FMT_API= \ - "FMT_BEGIN_NAMESPACE=namespace fmt {{" \ - "FMT_END_NAMESPACE=}}" \ - "FMT_DOC=1" - '''.format( - ' '.join([os.path.join(include_dir, h) for h in headers]), - self._doxyxml_dir).encode('utf-8')) - if p.returncode != 0: - raise CalledProcessError(p.returncode, cmd) - - # Merge all file-level XMLs into one to simplify search. 
- self._file_doxyxml = None - for h in headers: - filename = h.replace(".h", "_8h.xml") - with open(os.path.join(self._doxyxml_dir, filename)) as f: - doxyxml = et.parse(f) - if self._file_doxyxml is None: - self._file_doxyxml = doxyxml - continue - root = self._file_doxyxml.getroot() - for node in doxyxml.getroot(): - root.append(node) - - def collect_compound(self, identifier: str, - cls: List[et.Element]) -> Definition: - '''Collect a compound definition such as a struct.''' - path = os.path.join(self._doxyxml_dir, cls[0].get('refid') + '.xml') - with open(path) as f: - xml = et.parse(f) - node = xml.find('compounddef') - d = Definition(identifier, node=node) - d.template_params = convert_template_params(node) - d.desc = get_description(node) - d.members = [] - for m in node.findall('sectiondef[@kind="public-attrib"]/memberdef') + \ - node.findall('sectiondef[@kind="public-func"]/memberdef'): - name = m.find('name').text - # Doxygen incorrectly classifies members of private unnamed unions as - # public members of the containing class. - if name.endswith('_'): - continue - desc = get_description(m) - if len(desc) == 0: - continue - kind = m.get('kind') - member = Definition(name if name else '', kind=kind, is_member=True) - type = m.find('type').text - member.type = type if type else '' - if kind == 'function': - member.params = convert_params(m) - convert_return_type(member, m) - member.template_params = None - member.desc = desc - d.members.append(member) - return d - - def collect(self, identifier: str, config: Mapping[str, Any]) -> Definition: - qual_name = 'fmt::' + identifier - - param_str = None - paren = qual_name.find('(') - if paren > 0: - qual_name, param_str = qual_name[:paren], qual_name[paren + 1:-1] - - colons = qual_name.rfind('::') - namespace, name = qual_name[:colons], qual_name[colons + 2:] - - # Load XML. 
- doxyxml = self._ns2doxyxml.get(namespace) - if doxyxml is None: - path = f'namespace{namespace.replace("::", "_1_1")}.xml' - with open(os.path.join(self._doxyxml_dir, path)) as f: - doxyxml = et.parse(f) - self._ns2doxyxml[namespace] = doxyxml - - nodes = doxyxml.findall( - f"compounddef/sectiondef/memberdef/name[.='{name}']/..") - if len(nodes) == 0: - nodes = self._file_doxyxml.findall( - f"compounddef/sectiondef/memberdef/name[.='{name}']/..") - candidates = [] - for node in nodes: - # Process a function or a typedef. - params = None - d = Definition(name, node=node) - if d.kind == 'function': - params = convert_params(node) - node_param_str = ', '.join([p.type for p in params]) - if param_str and param_str != node_param_str: - candidates.append(f'{name}({node_param_str})') - continue - elif d.kind == 'define': - params = [] - for p in node.findall('param'): - param = Definition(p.find('defname').text, kind='param') - param.type = None - params.append(param) - d.type = convert_type(node.find('type')) - d.template_params = convert_template_params(node) - d.params = params - convert_return_type(d, node) - d.desc = get_description(node) - return d - - cls = doxyxml.findall(f"compounddef/innerclass[.='{qual_name}']") - if not cls: - raise Exception(f'Cannot find {identifier}. Candidates: {candidates}') - return self.collect_compound(identifier, cls) - - def render(self, d: Definition, config: dict) -> str: + +def render_decl(d: Definition) -> str: + text = '' if d.id is not None: - self.do_heading('', 0, id=d.id) - text = '
\n' - text += render_decl(d) - text += '
\n' - text += doxyxml2html(d.desc) - if d.members is not None: - for m in d.members: - text += self.render(m, config) - text += '
\n' - text += '
\n' + text += f'\n' + text += '
'
+
+    text += '
' + if d.template_params is not None: + text += 'template <' + text += ', '.join([render_param(p) for p in d.template_params]) + text += '>\n' + text += '
' + + text += '
' + end = ';' + if d.kind == 'function' or d.kind == 'variable': + text += d.type + ' ' if len(d.type) > 0 else '' + elif d.kind == 'typedef': + text += 'using ' + elif d.kind == 'define': + end = '' + else: + text += d.kind + ' ' + text += d.name + + if d.params is not None: + params = ', '.join([ + (p.type + ' ' if p.type else '') + p.name for p in d.params]) + text += '(' + escape_html(params) + ')' + if d.trailing_return_type: + text += ' -⁠> ' + escape_html(d.trailing_return_type) + elif d.kind == 'typedef': + text += ' = ' + escape_html(d.type) + + text += end + text += '
' + text += '
\n' + if d.id is not None: + text += f'
\n' return text + +class CxxHandler(BaseHandler): + def __init__(self, **kwargs: Any) -> None: + super().__init__(handler='cxx', **kwargs) + + headers = [ + 'args.h', 'base.h', 'chrono.h', 'color.h', 'compile.h', 'format.h', + 'os.h', 'ostream.h', 'printf.h', 'ranges.h', 'std.h', 'xchar.h' + ] + + # Run doxygen. + cmd = ['doxygen', '-'] + support_dir = Path(__file__).parents[3] + top_dir = os.path.dirname(support_dir) + include_dir = os.path.join(top_dir, 'include', 'fmt') + self._ns2doxyxml = {} + build_dir = os.path.join(top_dir, 'build') + os.makedirs(build_dir, exist_ok=True) + self._doxyxml_dir = os.path.join(build_dir, 'doxyxml') + p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT) + _, _ = p.communicate(input=r''' + PROJECT_NAME = fmt + GENERATE_XML = YES + GENERATE_LATEX = NO + GENERATE_HTML = NO + INPUT = {0} + XML_OUTPUT = {1} + QUIET = YES + AUTOLINK_SUPPORT = NO + MACRO_EXPANSION = YES + PREDEFINED = _WIN32=1 \ + __linux__=1 \ + FMT_ENABLE_IF(...)= \ + FMT_USE_USER_DEFINED_LITERALS=1 \ + FMT_USE_ALIAS_TEMPLATES=1 \ + FMT_USE_NONTYPE_TEMPLATE_ARGS=1 \ + FMT_API= \ + "FMT_BEGIN_NAMESPACE=namespace fmt {{" \ + "FMT_END_NAMESPACE=}}" \ + "FMT_DOC=1" + '''.format( + ' '.join([os.path.join(include_dir, h) for h in headers]), + self._doxyxml_dir).encode('utf-8')) + if p.returncode != 0: + raise CalledProcessError(p.returncode, cmd) + + # Merge all file-level XMLs into one to simplify search. 
+ self._file_doxyxml = None + for h in headers: + filename = h.replace(".h", "_8h.xml") + with open(os.path.join(self._doxyxml_dir, filename)) as f: + doxyxml = ElementTree.parse(f) + if self._file_doxyxml is None: + self._file_doxyxml = doxyxml + continue + root = self._file_doxyxml.getroot() + for node in doxyxml.getroot(): + root.append(node) + + def collect_compound(self, identifier: str, + cls: List[ElementTree.Element]) -> Definition: + """Collect a compound definition such as a struct.""" + path = os.path.join(self._doxyxml_dir, cls[0].get('refid') + '.xml') + with open(path) as f: + xml = ElementTree.parse(f) + node = xml.find('compounddef') + d = Definition(identifier, node=node) + d.template_params = convert_template_params(node) + d.desc = get_description(node) + d.members = [] + for m in \ + node.findall('sectiondef[@kind="public-attrib"]/memberdef') + \ + node.findall('sectiondef[@kind="public-func"]/memberdef'): + name = m.find('name').text + # Doxygen incorrectly classifies members of private unnamed unions as + # public members of the containing class. + if name.endswith('_'): + continue + desc = get_description(m) + if len(desc) == 0: + continue + kind = m.get('kind') + member = Definition(name if name else '', kind=kind, is_member=True) + type_text = m.find('type').text + member.type = type_text if type_text else '' + if kind == 'function': + member.params = convert_params(m) + convert_return_type(member, m) + member.template_params = None + member.desc = desc + d.members.append(member) + return d + + def collect(self, identifier: str, _config: Mapping[str, Any]) -> Definition: + qual_name = 'fmt::' + identifier + + param_str = None + paren = qual_name.find('(') + if paren > 0: + qual_name, param_str = qual_name[:paren], qual_name[paren + 1:-1] + + colons = qual_name.rfind('::') + namespace, name = qual_name[:colons], qual_name[colons + 2:] + + # Load XML. 
+ doxyxml = self._ns2doxyxml.get(namespace) + if doxyxml is None: + path = f'namespace{namespace.replace("::", "_1_1")}.xml' + with open(os.path.join(self._doxyxml_dir, path)) as f: + doxyxml = ElementTree.parse(f) + self._ns2doxyxml[namespace] = doxyxml + + nodes = doxyxml.findall( + f"compounddef/sectiondef/memberdef/name[.='{name}']/..") + if len(nodes) == 0: + nodes = self._file_doxyxml.findall( + f"compounddef/sectiondef/memberdef/name[.='{name}']/..") + candidates = [] + for node in nodes: + # Process a function or a typedef. + params = None + d = Definition(name, node=node) + if d.kind == 'function': + params = convert_params(node) + node_param_str = ', '.join([p.type for p in params]) + if param_str and param_str != node_param_str: + candidates.append(f'{name}({node_param_str})') + continue + elif d.kind == 'define': + params = [] + for p in node.findall('param'): + param = Definition(p.find('defname').text, kind='param') + param.type = None + params.append(param) + d.type = convert_type(node.find('type')) + d.template_params = convert_template_params(node) + d.params = params + convert_return_type(d, node) + d.desc = get_description(node) + return d + + cls = doxyxml.findall(f"compounddef/innerclass[.='{qual_name}']") + if not cls: + raise Exception(f'Cannot find {identifier}. Candidates: {candidates}') + return self.collect_compound(identifier, cls) + + def render(self, d: Definition, config: dict) -> str: + if d.id is not None: + self.do_heading('', 0, id=d.id) + text = '
\n' + text += render_decl(d) + text += '
\n' + text += doxyxml2html(d.desc) + if d.members is not None: + for m in d.members: + text += self.render(m, config) + text += '
\n' + text += '
\n' + return text + + def get_handler(theme: str, custom_templates: Optional[str] = None, - **config: Any) -> CxxHandler: - '''Return an instance of `CxxHandler`. - - Arguments: - theme: The theme to use when rendering contents. - custom_templates: Directory containing custom templates. - **config: Configuration passed to the handler. - ''' - return CxxHandler(theme=theme, custom_templates=custom_templates) + **_config: Any) -> CxxHandler: + """Return an instance of `CxxHandler`. + + Arguments: + theme: The theme to use when rendering contents. + custom_templates: Directory containing custom templates. + **_config: Configuration passed to the handler. + """ + return CxxHandler(theme=theme, custom_templates=custom_templates) diff --git a/third_party/fmt/test/CMakeLists.txt b/third_party/fmt/test/CMakeLists.txt index a14dfc24..adb6fa6d 100644 --- a/third_party/fmt/test/CMakeLists.txt +++ b/third_party/fmt/test/CMakeLists.txt @@ -8,22 +8,6 @@ target_include_directories(test-main PUBLIC $) target_link_libraries(test-main gtest fmt) -function(add_fmt_executable name) - add_executable(${name} ${ARGN}) - # (Wstringop-overflow) - [meta-bug] bogus/missing -Wstringop-overflow warnings - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=88443 - # Bogus -Wstringop-overflow warning - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100395 - # [10 Regression] spurious -Wstringop-overflow writing to a trailing array plus offset - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95353 - if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" AND - NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.0) - target_compile_options(${name} PRIVATE -Wno-stringop-overflow) - # The linker flag is needed for LTO. - target_link_libraries(${name} -Wno-stringop-overflow) - endif () -endfunction() - # Adds a test. # Usage: add_fmt_test(name srcs...) 
function(add_fmt_test name) @@ -42,7 +26,7 @@ function(add_fmt_test name) else () set(libs test-main fmt) endif () - add_fmt_executable(${name} ${sources}) + add_executable(${name} ${sources}) target_link_libraries(${name} ${libs}) if (ADD_FMT_TEST_HEADER_ONLY AND NOT FMT_UNICODE) @@ -145,7 +129,7 @@ if (NOT DEFINED MSVC_STATIC_RUNTIME AND MSVC) endif() if (NOT MSVC_STATIC_RUNTIME) - add_fmt_executable(posix-mock-test + add_executable(posix-mock-test posix-mock-test.cc ../src/format.cc ${TEST_MAIN_SRC}) target_include_directories( posix-mock-test PRIVATE ${PROJECT_SOURCE_DIR}/include) diff --git a/third_party/fmt/test/base-test.cc b/third_party/fmt/test/base-test.cc index 76570036..54c97024 100644 --- a/third_party/fmt/test/base-test.cc +++ b/third_party/fmt/test/base-test.cc @@ -92,6 +92,10 @@ TEST(string_view_test, compare) { check_op(); } +TEST(base_test, is_locking) { + EXPECT_FALSE(fmt::detail::is_locking()); +} + TEST(base_test, is_output_iterator) { EXPECT_TRUE((fmt::detail::is_output_iterator::value)); EXPECT_FALSE((fmt::detail::is_output_iterator::value)); @@ -112,12 +116,6 @@ TEST(base_test, is_back_insert_iterator) { std::front_insert_iterator>::value); } -TEST(base_test, buffer_appender) { -#ifdef __cpp_lib_ranges - EXPECT_TRUE((std::output_iterator)); -#endif -} - #if !FMT_GCC_VERSION || FMT_GCC_VERSION >= 470 TEST(buffer_test, noncopyable) { EXPECT_FALSE(std::is_copy_constructible>::value); @@ -477,14 +475,12 @@ TEST(arg_test, visit_invalid_arg) { #if FMT_USE_CONSTEXPR -enum class arg_id_result { none, empty, index, name }; +enum class arg_id_result { none, index, name }; struct test_arg_id_handler { arg_id_result res = arg_id_result::none; int index = 0; string_view name; - constexpr void on_auto() { res = arg_id_result::empty; } - constexpr void on_index(int i) { res = arg_id_result::index; index = i; @@ -504,8 +500,6 @@ constexpr test_arg_id_handler parse_arg_id(const char (&s)[N]) { } TEST(base_test, constexpr_parse_arg_id) { - 
static_assert(parse_arg_id(":").res == arg_id_result::empty, ""); - static_assert(parse_arg_id("}").res == arg_id_result::empty, ""); static_assert(parse_arg_id("42:").res == arg_id_result::index, ""); static_assert(parse_arg_id("42:").index == 42, ""); static_assert(parse_arg_id("foo:").res == arg_id_result::name, ""); @@ -522,19 +516,19 @@ template constexpr auto parse_test_specs(const char (&s)[N]) { } TEST(base_test, constexpr_parse_format_specs) { - static_assert(parse_test_specs("<").align == fmt::align::left, ""); - static_assert(parse_test_specs("*^").fill.get() == '*', ""); - static_assert(parse_test_specs("+").sign == fmt::sign::plus, ""); - static_assert(parse_test_specs("-").sign == fmt::sign::minus, ""); - static_assert(parse_test_specs(" ").sign == fmt::sign::space, ""); - static_assert(parse_test_specs("#").alt, ""); - static_assert(parse_test_specs("0").align == fmt::align::numeric, ""); - static_assert(parse_test_specs("L").localized, ""); + static_assert(parse_test_specs("<").align() == fmt::align::left, ""); + static_assert(parse_test_specs("*^").fill_unit() == '*', ""); + static_assert(parse_test_specs("+").sign() == fmt::sign::plus, ""); + static_assert(parse_test_specs("-").sign() == fmt::sign::none, ""); + static_assert(parse_test_specs(" ").sign() == fmt::sign::space, ""); + static_assert(parse_test_specs("#").alt(), ""); + static_assert(parse_test_specs("0").align() == fmt::align::numeric, ""); + static_assert(parse_test_specs("L").localized(), ""); static_assert(parse_test_specs("42").width == 42, ""); - static_assert(parse_test_specs("{42}").width_ref.val.index == 42, ""); + static_assert(parse_test_specs("{42}").width_ref.index == 42, ""); static_assert(parse_test_specs(".42").precision == 42, ""); - static_assert(parse_test_specs(".{42}").precision_ref.val.index == 42, ""); - static_assert(parse_test_specs("f").type == fmt::presentation_type::fixed, + static_assert(parse_test_specs(".{42}").precision_ref.index == 42, ""); + 
static_assert(parse_test_specs("f").type() == fmt::presentation_type::fixed, ""); } @@ -897,3 +891,24 @@ TEST(base_test, trappy_conversion) { fmt::format_to(std::back_inserter(s), "{}", its_a_trap()); EXPECT_EQ(s, "x"); } + +struct custom_container { + char data; + + using value_type = char; + + size_t size() const { return 0; } + void resize(size_t) {} + + void push_back(char) {} + char& operator[](size_t) { return data; } +}; + +FMT_BEGIN_NAMESPACE +template <> struct is_contiguous : std::true_type {}; +FMT_END_NAMESPACE + +TEST(base_test, format_to_custom_container) { + auto c = custom_container(); + fmt::format_to(std::back_inserter(c), ""); +} diff --git a/third_party/fmt/test/compile-test.cc b/third_party/fmt/test/compile-test.cc index a4c350bc..5ba3edea 100644 --- a/third_party/fmt/test/compile-test.cc +++ b/third_party/fmt/test/compile-test.cc @@ -14,14 +14,6 @@ #include "gmock/gmock.h" #include "gtest-extra.h" -TEST(iterator_test, counting_iterator) { - auto it = fmt::detail::counting_iterator(); - auto prev = it++; - EXPECT_EQ(prev.count(), 0); - EXPECT_EQ(it.count(), 1); - EXPECT_EQ((it + 41).count(), 42); -} - TEST(compile_test, compile_fallback) { // FMT_COMPILE should fallback on runtime formatting when `if constexpr` is // not available. 
@@ -206,7 +198,7 @@ TEST(compile_test, format_to_n) { EXPECT_STREQ("2a", buffer); } -# ifdef __cpp_lib_bit_cast +# if FMT_USE_CONSTEVAL && (!FMT_MSC_VERSION || FMT_MSC_VERSION >= 1930) TEST(compile_test, constexpr_formatted_size) { FMT_CONSTEXPR20 size_t size = fmt::formatted_size(FMT_COMPILE("{}"), 42); EXPECT_EQ(size, 2); diff --git a/third_party/fmt/test/format-test.cc b/third_party/fmt/test/format-test.cc index b16f11cc..077109c6 100644 --- a/third_party/fmt/test/format-test.cc +++ b/third_party/fmt/test/format-test.cc @@ -26,6 +26,9 @@ #include // std::string #include // std::thread #include // std::is_default_constructible +#if FMT_CPLUSPLUS > 201703L && FMT_HAS_INCLUDE() +# include +#endif #include "gtest-extra.h" #include "mock-allocator.h" @@ -42,6 +45,10 @@ using fmt::detail::uint128_fallback; using testing::Return; using testing::StrictMock; +#ifdef __cpp_lib_concepts +static_assert(std::output_iterator); +#endif + enum { buffer_size = 256 }; TEST(uint128_test, ctor) { @@ -471,6 +478,12 @@ TEST(memory_buffer_test, max_size_allocator_overflow) { EXPECT_THROW(buffer.resize(161), std::exception); } +TEST(format_test, digits2_alignment) { + auto p = + fmt::detail::bit_cast(fmt::detail::digits2(0)); + EXPECT_EQ(p % 2, 0); +} + TEST(format_test, exception_from_lib) { EXPECT_THROW_MSG(fmt::report_error("test"), format_error, "test"); } @@ -884,23 +897,23 @@ TEST(format_test, runtime_width) { "invalid format string"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, -1), format_error, - "negative width"); + "width/precision is out of range"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, (INT_MAX + 1u)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, -1l), format_error, - "negative width"); + "width/precision is out of range"); if (fmt::detail::const_check(sizeof(long) > sizeof(int))) { long value = INT_MAX; 
EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, (value + 1)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); } EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, (INT_MAX + 1ul)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, '0'), format_error, - "width is not integer"); + "width/precision is not integer"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:{1}}"), 0, 0.0), format_error, - "width is not integer"); + "width/precision is not integer"); EXPECT_EQ(fmt::format("{0:{1}}", -42, 4), " -42"); EXPECT_EQ(fmt::format("{0:{1}}", 42u, 5), " 42"); @@ -939,7 +952,7 @@ TEST(format_test, precision) { EXPECT_THROW_MSG((void)fmt::format(runtime("{0:."), 0.0), format_error, "invalid precision"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.}"), 0.0), format_error, - "invalid precision"); + "invalid format string"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.2"), 0), format_error, "invalid format specifier"); @@ -1060,13 +1073,15 @@ TEST(format_test, precision) { EXPECT_THROW_MSG( (void)fmt::format("{:.2147483646f}", -2.2121295195081227E+304), format_error, "number is too big"); + EXPECT_THROW_MSG((void)fmt::format(runtime("{:.f}"), 42.0), format_error, + "invalid format string"); EXPECT_EQ(fmt::format("{0:.2}", "str"), "st"); EXPECT_EQ(fmt::format("{0:.5}", "вожыкі"), "вожык"); EXPECT_EQ(fmt::format("{0:.6}", "123456\xad"), "123456"); } -TEST(xchar_test, utf8_precision) { +TEST(format_test, utf8_precision) { auto result = fmt::format("{:.4}", "caf\u00e9s"); // cafés EXPECT_EQ(fmt::detail::compute_width(result), 4); EXPECT_EQ(result, "caf\u00e9"); @@ -1103,23 +1118,23 @@ TEST(format_test, runtime_precision) { "invalid format string"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, -1), - format_error, "negative precision"); + format_error, "width/precision is out of range"); 
EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, (INT_MAX + 1u)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, -1l), - format_error, "negative precision"); + format_error, "width/precision is out of range"); if (fmt::detail::const_check(sizeof(long) > sizeof(int))) { long value = INT_MAX; EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, (value + 1)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); } EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, (INT_MAX + 1ul)), - format_error, "number is too big"); + format_error, "width/precision is out of range"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, '0'), - format_error, "precision is not integer"); + format_error, "width/precision is not integer"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 0.0, 0.0), - format_error, "precision is not integer"); + format_error, "width/precision is not integer"); EXPECT_THROW_MSG((void)fmt::format(runtime("{0:.{1}}"), 42, 2), format_error, "invalid format specifier"); @@ -2057,6 +2072,13 @@ TEST(format_test, output_iterators) { EXPECT_EQ("42", s.str()); } +TEST(format_test, fill_via_appender) { + fmt::memory_buffer buf; + auto it = fmt::appender(buf); + std::fill_n(it, 3, '~'); + EXPECT_EQ(fmt::to_string(buf), "~~~"); +} + TEST(format_test, formatted_size) { EXPECT_EQ(2u, fmt::formatted_size("{}", 42)); EXPECT_EQ(2u, fmt::formatted_size(std::locale(), "{}", 42)); diff --git a/third_party/fmt/test/perf-sanity.cc b/third_party/fmt/test/perf-sanity.cc index 7696b2f1..aa0c0cae 100644 --- a/third_party/fmt/test/perf-sanity.cc +++ b/third_party/fmt/test/perf-sanity.cc @@ -19,8 +19,7 @@ int main() { std::atomic_signal_fence(std::memory_order_acq_rel); // Clobber memory. auto end = std::chrono::steady_clock::now(); + // Print time in milliseconds. 
std::chrono::duration duration = end - start; - double total_time = duration.count() * 1000; // Convert to milliseconds. - fmt::print("Total time for formatting {} strings: {:.1f} ms.\n", n, - total_time); + fmt::print("{:.1f}\n", duration.count() * 1000); } diff --git a/third_party/fmt/test/ranges-test.cc b/third_party/fmt/test/ranges-test.cc index 1a5b5a70..40d545b1 100644 --- a/third_party/fmt/test/ranges-test.cc +++ b/third_party/fmt/test/ranges-test.cc @@ -170,6 +170,8 @@ TEST(ranges_test, format_adl_begin_end) { TEST(ranges_test, format_pair) { auto p = std::pair(42, 1.5f); EXPECT_EQ(fmt::format("{}", p), "(42, 1.5)"); + EXPECT_EQ(fmt::format("{:}", p), "(42, 1.5)"); + EXPECT_EQ(fmt::format("{:n}", p), "421.5"); } struct unformattable {}; @@ -178,6 +180,7 @@ TEST(ranges_test, format_tuple) { auto t = std::tuple(42, 1.5f, "this is tuple", 'i'); EXPECT_EQ(fmt::format("{}", t), "(42, 1.5, \"this is tuple\", 'i')"); + EXPECT_EQ(fmt::format("{:n}", t), "421.5\"this is tuple\"'i'"); EXPECT_EQ(fmt::format("{}", std::tuple<>()), "()"); diff --git a/third_party/fmt/test/scan.h b/third_party/fmt/test/scan.h index 1bcdc548..304e692e 100644 --- a/third_party/fmt/test/scan.h +++ b/third_party/fmt/test/scan.h @@ -368,7 +368,7 @@ const char* parse_scan_specs(const char* begin, const char* end, switch (to_ascii(*begin)) { // TODO: parse more scan format specifiers case 'x': - specs.type = presentation_type::hex; + specs.set_type(presentation_type::hex); ++begin; break; case '}': @@ -437,7 +437,7 @@ auto read_hex(scan_iterator it, T& value) -> scan_iterator { template ::value)> auto read(scan_iterator it, T& value, const format_specs& specs) -> scan_iterator { - if (specs.type == presentation_type::hex) return read_hex(it, value); + if (specs.type() == presentation_type::hex) return read_hex(it, value); return read(it, value); } diff --git a/third_party/fmt/test/std-test.cc b/third_party/fmt/test/std-test.cc index 1327fe82..bcc7bd5f 100644 --- 
a/third_party/fmt/test/std-test.cc +++ b/third_party/fmt/test/std-test.cc @@ -35,6 +35,9 @@ TEST(std_test, path) { L"\x0447\x044B\x043D\x0430")), "Шчучыншчына"); EXPECT_EQ(fmt::format("{}", path(L"\xd800")), "�"); + EXPECT_EQ(fmt::format("{}", path(L"HEAD \xd800 TAIL")), "HEAD � TAIL"); + EXPECT_EQ(fmt::format("{}", path(L"HEAD \xD83D\xDE00 TAIL")), "HEAD \xF0\x9F\x98\x80 TAIL"); + EXPECT_EQ(fmt::format("{}", path(L"HEAD \xD83D\xD83D\xDE00 TAIL")), "HEAD �\xF0\x9F\x98\x80 TAIL"); EXPECT_EQ(fmt::format("{:?}", path(L"\xd800")), "\"\\ud800\""); # endif } diff --git a/third_party/googletest/BUILD.bazel b/third_party/googletest/BUILD.bazel index e407ae29..0306468e 100644 --- a/third_party/googletest/BUILD.bazel +++ b/third_party/googletest/BUILD.bazel @@ -138,19 +138,19 @@ cc_library( }), deps = select({ ":has_absl": [ - "@com_google_absl//absl/container:flat_hash_set", - "@com_google_absl//absl/debugging:failure_signal_handler", - "@com_google_absl//absl/debugging:stacktrace", - "@com_google_absl//absl/debugging:symbolize", - "@com_google_absl//absl/flags:flag", - "@com_google_absl//absl/flags:parse", - "@com_google_absl//absl/flags:reflection", - "@com_google_absl//absl/flags:usage", - "@com_google_absl//absl/strings", - "@com_google_absl//absl/types:any", - "@com_google_absl//absl/types:optional", - "@com_google_absl//absl/types:variant", - "@com_googlesource_code_re2//:re2", + "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/debugging:failure_signal_handler", + "@abseil-cpp//absl/debugging:stacktrace", + "@abseil-cpp//absl/debugging:symbolize", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/flags:parse", + "@abseil-cpp//absl/flags:reflection", + "@abseil-cpp//absl/flags:usage", + "@abseil-cpp//absl/strings", + "@abseil-cpp//absl/types:any", + "@abseil-cpp//absl/types:optional", + "@abseil-cpp//absl/types:variant", + "@re2//:re2", ], "//conditions:default": [], }) + select({ diff --git a/third_party/googletest/CMakeLists.txt 
b/third_party/googletest/CMakeLists.txt index 737b7209..512e5c3d 100644 --- a/third_party/googletest/CMakeLists.txt +++ b/third_party/googletest/CMakeLists.txt @@ -4,7 +4,7 @@ cmake_minimum_required(VERSION 3.13) project(googletest-distribution) -set(GOOGLETEST_VERSION 1.15.0) +set(GOOGLETEST_VERSION 1.15.2) if(NOT CYGWIN AND NOT MSYS AND NOT ${CMAKE_SYSTEM_NAME} STREQUAL QNX) set(CMAKE_CXX_EXTENSIONS OFF) diff --git a/third_party/googletest/MODULE.bazel b/third_party/googletest/MODULE.bazel index b179fe93..c9a52e05 100644 --- a/third_party/googletest/MODULE.bazel +++ b/third_party/googletest/MODULE.bazel @@ -40,22 +40,28 @@ module( # Please keep the versions in sync with the versions in the WORKSPACE file. bazel_dep(name = "abseil-cpp", - version = "20240116.2", - repo_name = "com_google_absl") + version = "20240116.2") bazel_dep(name = "platforms", version = "0.0.10") bazel_dep(name = "re2", - repo_name = "com_googlesource_code_re2", version = "2024-07-02") bazel_dep(name = "rules_python", - version = "0.29.0") + version = "0.34.0", + dev_dependency = True) +# https://rules-python.readthedocs.io/en/stable/toolchains.html#library-modules-with-dev-only-python-usage +python = use_extension( + "@rules_python//python/extensions:python.bzl", + "python", + dev_dependency = True +) + +python.toolchain(python_version = "3.12", + is_default = True, + ignore_root_user_error = True) fake_fuchsia_sdk = use_repo_rule("//:fake_fuchsia_sdk.bzl", "fake_fuchsia_sdk") fake_fuchsia_sdk(name = "fuchsia_sdk") - -# https://github.com/bazelbuild/rules_python/blob/main/BZLMOD_SUPPORT.md#default-toolchain-is-not-the-local-system-python -register_toolchains("@bazel_tools//tools/python:autodetecting_toolchain") diff --git a/third_party/googletest/README.md b/third_party/googletest/README.md index f50c6705..03c70a1e 100644 --- a/third_party/googletest/README.md +++ b/third_party/googletest/README.md @@ -9,7 +9,7 @@ GoogleTest now follows the We recommend [updating to the latest commit in the 
`main` branch as often as possible](https://github.com/abseil/abseil-cpp/blob/master/FAQ.md#what-is-live-at-head-and-how-do-i-do-it). We do publish occasional semantic versions, tagged with -`v${major}.${minor}.${patch}` (e.g. `v1.15.0`). +`v${major}.${minor}.${patch}` (e.g. `v1.15.2`). #### Documentation Updates @@ -17,9 +17,9 @@ Our documentation is now live on GitHub Pages at https://google.github.io/googletest/. We recommend browsing the documentation on GitHub Pages rather than directly in the repository. -#### Release 1.15.0 +#### Release 1.15.2 -[Release 1.15.0](https://github.com/google/googletest/releases/tag/v1.15.0) is +[Release 1.15.2](https://github.com/google/googletest/releases/tag/v1.15.2) is now available. The 1.15.x branch requires at least C++14. diff --git a/third_party/googletest/WORKSPACE b/third_party/googletest/WORKSPACE index 218e6c2e..63f76813 100644 --- a/third_party/googletest/WORKSPACE +++ b/third_party/googletest/WORKSPACE @@ -1,4 +1,34 @@ -workspace(name = "com_google_googletest") +# Copyright 2024 Google Inc. +# All Rights Reserved. +# +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +workspace(name = "googletest") load("//:googletest_deps.bzl", "googletest_deps") googletest_deps() diff --git a/third_party/googletest/ci/windows-presubmit.bat b/third_party/googletest/ci/windows-presubmit.bat index 9753f9c0..1adc1a16 100644 --- a/third_party/googletest/ci/windows-presubmit.bat +++ b/third_party/googletest/ci/windows-presubmit.bat @@ -46,8 +46,13 @@ RMDIR /S /Q cmake_msvc2022 :: ---------------------------------------------------------------------------- :: Bazel +:: The default home directory on Kokoro is a long path which causes errors +:: because of Windows limitations on path length. +:: --output_user_root=C:\tmp causes Bazel to use a shorter path. SET BAZEL_VS=C:\Program Files\Microsoft Visual Studio\2022\Community -%BAZEL_EXE% test ... ^ +%BAZEL_EXE% ^ + --output_user_root=C:\tmp ^ + test ... 
^ --compilation_mode=dbg ^ --copt=/std:c++14 ^ --copt=/WX ^ diff --git a/third_party/googletest/docs/quickstart-bazel.md b/third_party/googletest/docs/quickstart-bazel.md index d14f7c6c..5750f026 100644 --- a/third_party/googletest/docs/quickstart-bazel.md +++ b/third_party/googletest/docs/quickstart-bazel.md @@ -48,7 +48,7 @@ with the following content: # Choose the most recent version available at # https://registry.bazel.build/modules/googletest -bazel_dep(name = "googletest", version = "1.15.0") +bazel_dep(name = "googletest", version = "1.15.2") ``` Now you're ready to build C++ code that uses GoogleTest. diff --git a/third_party/googletest/googlemock/include/gmock/gmock-actions.h b/third_party/googletest/googlemock/include/gmock/gmock-actions.h index cd129969..aa470799 100644 --- a/third_party/googletest/googlemock/include/gmock/gmock-actions.h +++ b/third_party/googletest/googlemock/include/gmock/gmock-actions.h @@ -1493,6 +1493,7 @@ class DoAllAction { // providing a call operator because even with a particular set of arguments // they don't have a fixed return type. + // We support conversion to OnceAction whenever the sub-action does. template >::value, @@ -1501,6 +1502,21 @@ class DoAllAction { return std::move(final_action_); } + // We also support conversion to OnceAction whenever the sub-action supports + // conversion to Action (since any Action can also be a OnceAction). + template < + typename R, typename... Args, + typename std::enable_if< + conjunction< + negation< + std::is_convertible>>, + std::is_convertible>>::value, + int>::type = 0> + operator OnceAction() && { // NOLINT + return Action(std::move(final_action_)); + } + + // We support conversion to Action whenever the sub-action does. template < typename R, typename... 
Args, typename std::enable_if< @@ -1580,16 +1596,16 @@ class DoAllAction : Base({}, std::forward(other_actions)...), initial_action_(std::forward(initial_action)) {} - template ...)>>, - std::is_convertible>>::value, - int>::type = 0> + // We support conversion to OnceAction whenever both the initial action and + // the rest support conversion to OnceAction. + template < + typename R, typename... Args, + typename std::enable_if< + conjunction...)>>, + std::is_convertible>>::value, + int>::type = 0> operator OnceAction() && { // NOLINT // Return an action that first calls the initial action with arguments // filtered through InitialActionArgType, then forwards arguments directly @@ -1612,12 +1628,34 @@ class DoAllAction }; } + // We also support conversion to OnceAction whenever the initial action + // supports conversion to Action (since any Action can also be a OnceAction). + // + // The remaining sub-actions must also be compatible, but we don't need to + // special case them because the base class deals with them. + template < + typename R, typename... Args, + typename std::enable_if< + conjunction< + negation...)>>>, + std::is_convertible...)>>, + std::is_convertible>>::value, + int>::type = 0> + operator OnceAction() && { // NOLINT + return DoAll( + Action...)>(std::move(initial_action_)), + std::move(static_cast(*this))); + } + + // We support conversion to Action whenever both the initial action and the + // rest support conversion to Action. template < typename R, typename... Args, typename std::enable_if< conjunction< - // Both the initial action and the rest must support conversion to - // Action. std::is_convertible...)>>, std::is_convertible>>::value, @@ -1665,8 +1703,9 @@ template struct ReturnArgAction { template ::type> - auto operator()(Args&&... args) const -> decltype(std::get( - std::forward_as_tuple(std::forward(args)...))) { + auto operator()(Args&&... 
args) const + -> decltype(std::get( + std::forward_as_tuple(std::forward(args)...))) { return std::get(std::forward_as_tuple(std::forward(args)...)); } }; diff --git a/third_party/googletest/googlemock/include/gmock/gmock-matchers.h b/third_party/googletest/googlemock/include/gmock/gmock-matchers.h index 063ee6ca..3daf6173 100644 --- a/third_party/googletest/googlemock/include/gmock/gmock-matchers.h +++ b/third_party/googletest/googlemock/include/gmock/gmock-matchers.h @@ -1300,34 +1300,48 @@ class AllOfMatcherImpl : public MatcherInterface { bool MatchAndExplain(const T& x, MatchResultListener* listener) const override { - // If either matcher1_ or matcher2_ doesn't match x, we only need - // to explain why one of them fails. + // This method uses matcher's explanation when explaining the result. + // However, if matcher doesn't provide one, this method uses matcher's + // description. std::string all_match_result; - - for (size_t i = 0; i < matchers_.size(); ++i) { + for (const Matcher& matcher : matchers_) { StringMatchResultListener slistener; - if (matchers_[i].MatchAndExplain(x, &slistener)) { - if (all_match_result.empty()) { - all_match_result = slistener.str(); + // Return explanation for first failed matcher. + if (!matcher.MatchAndExplain(x, &slistener)) { + const std::string explanation = slistener.str(); + if (!explanation.empty()) { + *listener << explanation; } else { - std::string result = slistener.str(); - if (!result.empty()) { - all_match_result += ", and "; - all_match_result += result; - } + *listener << "which doesn't match (" << Describe(matcher) << ")"; } - } else { - *listener << slistener.str(); return false; } + // Keep track of explanations in case all matchers succeed. 
+ std::string explanation = slistener.str(); + if (explanation.empty()) { + explanation = Describe(matcher); + } + if (all_match_result.empty()) { + all_match_result = explanation; + } else { + if (!explanation.empty()) { + all_match_result += ", and "; + all_match_result += explanation; + } + } } - // Otherwise we need to explain why *both* of them match. *listener << all_match_result; return true; } private: + // Returns matcher description as a string. + std::string Describe(const Matcher& matcher) const { + StringMatchResultListener listener; + matcher.DescribeTo(listener.stream()); + return listener.str(); + } const std::vector> matchers_; }; diff --git a/third_party/googletest/googlemock/include/gmock/gmock-more-actions.h b/third_party/googletest/googlemock/include/gmock/gmock-more-actions.h index e341d47f..55294dbd 100644 --- a/third_party/googletest/googlemock/include/gmock/gmock-more-actions.h +++ b/third_party/googletest/googlemock/include/gmock/gmock-more-actions.h @@ -601,9 +601,10 @@ template struct InvokeArgumentAction { template ::type> - auto operator()(Args &&...args) const -> decltype(internal::InvokeArgument( - std::get(std::forward_as_tuple(std::forward(args)...)), - std::declval()...)) { + auto operator()(Args &&...args) const + -> decltype(internal::InvokeArgument( + std::get(std::forward_as_tuple(std::forward(args)...)), + std::declval()...)) { internal::FlatTuple args_tuple(FlatTupleConstructTag{}, std::forward(args)...); return params.Apply([&](const Params &...unpacked_params) { diff --git a/third_party/googletest/googlemock/include/gmock/gmock-spec-builders.h b/third_party/googletest/googlemock/include/gmock/gmock-spec-builders.h index 78ca15d0..c4c42b7c 100644 --- a/third_party/googletest/googlemock/include/gmock/gmock-spec-builders.h +++ b/third_party/googletest/googlemock/include/gmock/gmock-spec-builders.h @@ -868,7 +868,7 @@ class GTEST_API_ ExpectationBase { Clause last_clause_; mutable bool action_count_checked_; // Under mutex_. 
mutable Mutex mutex_; // Protects action_count_checked_. -}; // class ExpectationBase +}; // class ExpectationBase template class TypedExpectation; @@ -1838,9 +1838,8 @@ R FunctionMocker::InvokeWith(ArgumentTuple&& args) // Doing so slows down compilation dramatically because the *constructor* of // std::function is re-instantiated with different template // parameters each time. - const UninterestingCallCleanupHandler report_uninteresting_call = { - reaction, ss - }; + const UninterestingCallCleanupHandler report_uninteresting_call = {reaction, + ss}; return PerformActionAndPrintResult(nullptr, std::move(args), ss.str(), ss); } @@ -1890,8 +1889,7 @@ R FunctionMocker::InvokeWith(ArgumentTuple&& args) // std::function is re-instantiated with different template // parameters each time. const FailureCleanupHandler handle_failures = { - ss, why, loc, untyped_expectation, found, is_excessive - }; + ss, why, loc, untyped_expectation, found, is_excessive}; return PerformActionAndPrintResult(untyped_action, std::move(args), ss.str(), ss); diff --git a/third_party/googletest/googlemock/include/gmock/internal/gmock-port.h b/third_party/googletest/googlemock/include/gmock/internal/gmock-port.h index e9d9e32a..42d36d2f 100644 --- a/third_party/googletest/googlemock/include/gmock/internal/gmock-port.h +++ b/third_party/googletest/googlemock/include/gmock/internal/gmock-port.h @@ -42,6 +42,7 @@ #include #include + #include #include diff --git a/third_party/googletest/googlemock/src/gmock-cardinalities.cc b/third_party/googletest/googlemock/src/gmock-cardinalities.cc index 92cde348..a7283aaf 100644 --- a/third_party/googletest/googlemock/src/gmock-cardinalities.cc +++ b/third_party/googletest/googlemock/src/gmock-cardinalities.cc @@ -53,12 +53,12 @@ class BetweenCardinalityImpl : public CardinalityInterface { : min_(min >= 0 ? min : 0), max_(max >= min_ ? 
max : min_) { std::stringstream ss; if (min < 0) { - ss << "The invocation lower bound must be >= 0, " - << "but is actually " << min << "."; + ss << "The invocation lower bound must be >= 0, " << "but is actually " + << min << "."; internal::Expect(false, __FILE__, __LINE__, ss.str()); } else if (max < 0) { - ss << "The invocation upper bound must be >= 0, " - << "but is actually " << max << "."; + ss << "The invocation upper bound must be >= 0, " << "but is actually " + << max << "."; internal::Expect(false, __FILE__, __LINE__, ss.str()); } else if (min > max) { ss << "The invocation upper bound (" << max diff --git a/third_party/googletest/googlemock/test/gmock-actions_test.cc b/third_party/googletest/googlemock/test/gmock-actions_test.cc index da1675c5..82c22c31 100644 --- a/third_party/googletest/googlemock/test/gmock-actions_test.cc +++ b/third_party/googletest/googlemock/test/gmock-actions_test.cc @@ -441,8 +441,8 @@ TEST(DefaultValueDeathTest, GetReturnsBuiltInDefaultValueWhenUnset) { EXPECT_EQ(0, DefaultValue::Get()); - EXPECT_DEATH_IF_SUPPORTED({ DefaultValue::Get(); }, - ""); + EXPECT_DEATH_IF_SUPPORTED( + { DefaultValue::Get(); }, ""); } TEST(DefaultValueTest, GetWorksForMoveOnlyIfSet) { @@ -505,8 +505,8 @@ TEST(DefaultValueOfReferenceDeathTest, GetReturnsBuiltInDefaultValueWhenUnset) { EXPECT_FALSE(DefaultValue::IsSet()); EXPECT_DEATH_IF_SUPPORTED({ DefaultValue::Get(); }, ""); - EXPECT_DEATH_IF_SUPPORTED({ DefaultValue::Get(); }, - ""); + EXPECT_DEATH_IF_SUPPORTED( + { DefaultValue::Get(); }, ""); } // Tests that ActionInterface can be implemented by defining the @@ -1477,6 +1477,54 @@ TEST(DoAll, SupportsTypeErasedActions) { } } +// A DoAll action should be convertible to a OnceAction, even when its component +// sub-actions are user-provided types that define only an Action conversion +// operator. If they supposed being called more than once then they also support +// being called at most once. 
+TEST(DoAll, ConvertibleToOnceActionWithUserProvidedActionConversion) { + // Simplest case: only one sub-action. + struct CustomFinal final { + operator Action() { // NOLINT + return Return(17); + } + + operator Action() { // NOLINT + return Return(19); + } + }; + + { + OnceAction action = DoAll(CustomFinal{}); + EXPECT_EQ(17, std::move(action).Call()); + } + + { + OnceAction action = DoAll(CustomFinal{}); + EXPECT_EQ(19, std::move(action).Call(0, 0)); + } + + // It should also work with multiple sub-actions. + struct CustomInitial final { + operator Action() { // NOLINT + return [] {}; + } + + operator Action() { // NOLINT + return [] {}; + } + }; + + { + OnceAction action = DoAll(CustomInitial{}, CustomFinal{}); + EXPECT_EQ(17, std::move(action).Call()); + } + + { + OnceAction action = DoAll(CustomInitial{}, CustomFinal{}); + EXPECT_EQ(19, std::move(action).Call(0, 0)); + } +} + // Tests using WithArgs and with an action that takes 1 argument. TEST(WithArgsTest, OneArg) { Action a = WithArgs<1>(Invoke(Unary)); // NOLINT diff --git a/third_party/googletest/googlemock/test/gmock-matchers-arithmetic_test.cc b/third_party/googletest/googlemock/test/gmock-matchers-arithmetic_test.cc index f1769628..6968f55b 100644 --- a/third_party/googletest/googlemock/test/gmock-matchers-arithmetic_test.cc +++ b/third_party/googletest/googlemock/test/gmock-matchers-arithmetic_test.cc @@ -36,7 +36,9 @@ #include #include +#include "gmock/gmock.h" #include "test/gmock-matchers_test.h" +#include "gtest/gtest.h" // Silence warning C4244: 'initializing': conversion from 'int' to 'short', // possible loss of data and C4100, unreferenced local parameter @@ -559,10 +561,9 @@ TEST_P(AllOfTestP, ExplainsResult) { Matcher m; // Successful match. Both matchers need to explain. The second - // matcher doesn't give an explanation, so only the first matcher's - // explanation is printed. + // matcher doesn't give an explanation, so the matcher description is used. 
m = AllOf(GreaterThan(10), Lt(30)); - EXPECT_EQ("which is 15 more than 10", Explain(m, 25)); + EXPECT_EQ("which is 15 more than 10, and is < 30", Explain(m, 25)); // Successful match. Both matchers need to explain. m = AllOf(GreaterThan(10), GreaterThan(20)); @@ -572,8 +573,9 @@ TEST_P(AllOfTestP, ExplainsResult) { // Successful match. All matchers need to explain. The second // matcher doesn't given an explanation. m = AllOf(GreaterThan(10), Lt(30), GreaterThan(20)); - EXPECT_EQ("which is 15 more than 10, and which is 5 more than 20", - Explain(m, 25)); + EXPECT_EQ( + "which is 15 more than 10, and is < 30, and which is 5 more than 20", + Explain(m, 25)); // Successful match. All matchers need to explain. m = AllOf(GreaterThan(10), GreaterThan(20), GreaterThan(30)); @@ -588,10 +590,10 @@ TEST_P(AllOfTestP, ExplainsResult) { EXPECT_EQ("which is 5 less than 10", Explain(m, 5)); // Failed match. The second matcher, which failed, needs to - // explain. Since it doesn't given an explanation, nothing is + // explain. Since it doesn't given an explanation, the matcher text is // printed. m = AllOf(GreaterThan(10), Lt(30)); - EXPECT_EQ("", Explain(m, 40)); + EXPECT_EQ("which doesn't match (is < 30)", Explain(m, 40)); // Failed match. The second matcher, which failed, needs to // explain. 
diff --git a/third_party/googletest/googlemock/test/gmock-matchers-comparisons_test.cc b/third_party/googletest/googlemock/test/gmock-matchers-comparisons_test.cc index 5b75b457..a324c4c7 100644 --- a/third_party/googletest/googlemock/test/gmock-matchers-comparisons_test.cc +++ b/third_party/googletest/googlemock/test/gmock-matchers-comparisons_test.cc @@ -37,13 +37,14 @@ #include #include +#include "gmock/gmock.h" #include "test/gmock-matchers_test.h" +#include "gtest/gtest.h" // Silence warning C4244: 'initializing': conversion from 'int' to 'short', // possible loss of data and C4100, unreferenced local parameter GTEST_DISABLE_MSC_WARNINGS_PUSH_(4244 4100) - namespace testing { namespace gmock_matchers_test { namespace { @@ -2334,9 +2335,11 @@ TEST(ExplainMatchResultTest, AllOf_True_True) { EXPECT_EQ("which is 0 modulo 2, and which is 0 modulo 3", Explain(m, 6)); } +// Tests that when AllOf() succeeds, but matchers have no explanation, +// the matcher description is used. TEST(ExplainMatchResultTest, AllOf_True_True_2) { const Matcher m = AllOf(Ge(2), Le(3)); - EXPECT_EQ("", Explain(m, 2)); + EXPECT_EQ("is >= 2, and is <= 3", Explain(m, 2)); } INSTANTIATE_GTEST_MATCHER_TEST_P(ExplainmatcherResultTest); diff --git a/third_party/googletest/googlemock/test/gmock-matchers-containers_test.cc b/third_party/googletest/googlemock/test/gmock-matchers-containers_test.cc index acea4ec3..52b52d5d 100644 --- a/third_party/googletest/googlemock/test/gmock-matchers-containers_test.cc +++ b/third_party/googletest/googlemock/test/gmock-matchers-containers_test.cc @@ -43,14 +43,14 @@ #include #include +#include "gmock/gmock.h" +#include "test/gmock-matchers_test.h" #include "gtest/gtest.h" // Silence warning C4244: 'initializing': conversion from 'int' to 'short', // possible loss of data and C4100, unreferenced local parameter GTEST_DISABLE_MSC_WARNINGS_PUSH_(4244 4100) -#include "test/gmock-matchers_test.h" - namespace testing { namespace gmock_matchers_test { namespace { diff 
--git a/third_party/googletest/googlemock/test/gmock-matchers-misc_test.cc b/third_party/googletest/googlemock/test/gmock-matchers-misc_test.cc index b8f64587..2bb4cdbe 100644 --- a/third_party/googletest/googlemock/test/gmock-matchers-misc_test.cc +++ b/third_party/googletest/googlemock/test/gmock-matchers-misc_test.cc @@ -39,14 +39,14 @@ #include #include +#include "gmock/gmock.h" +#include "test/gmock-matchers_test.h" #include "gtest/gtest.h" // Silence warning C4244: 'initializing': conversion from 'int' to 'short', // possible loss of data and C4100, unreferenced local parameter GTEST_DISABLE_MSC_WARNINGS_PUSH_(4244 4100) -#include "test/gmock-matchers_test.h" - namespace testing { namespace gmock_matchers_test { namespace { diff --git a/third_party/googletest/googlemock/test/gmock_link_test.h b/third_party/googletest/googlemock/test/gmock_link_test.h index cf0a985b..cb5179b2 100644 --- a/third_party/googletest/googlemock/test/gmock_link_test.h +++ b/third_party/googletest/googlemock/test/gmock_link_test.h @@ -186,8 +186,8 @@ using testing::SetErrnoAndReturn; #endif #if GTEST_HAS_EXCEPTIONS -using testing::Throw; using testing::Rethrow; +using testing::Throw; #endif using testing::ContainsRegex; diff --git a/third_party/googletest/googletest/README.md b/third_party/googletest/googletest/README.md index a7d9aa32..7c351747 100644 --- a/third_party/googletest/googletest/README.md +++ b/third_party/googletest/googletest/README.md @@ -25,7 +25,7 @@ When building GoogleTest as a standalone project, the typical workflow starts with ``` -git clone https://github.com/google/googletest.git -b v1.15.0 +git clone https://github.com/google/googletest.git -b v1.15.2 cd googletest # Main directory of the cloned repository. mkdir build # Create a directory to hold the build output. 
cd build diff --git a/third_party/googletest/googletest_deps.bzl b/third_party/googletest/googletest_deps.bzl index 65fc48c3..281af5c0 100644 --- a/third_party/googletest/googletest_deps.bzl +++ b/third_party/googletest/googletest_deps.bzl @@ -6,17 +6,17 @@ load("//:fake_fuchsia_sdk.bzl", "fake_fuchsia_sdk") def googletest_deps(): """Loads common dependencies needed to use the googletest library.""" - if not native.existing_rule("com_googlesource_code_re2"): + if not native.existing_rule("re2"): http_archive( - name = "com_googlesource_code_re2", + name = "re2", sha256 = "eb2df807c781601c14a260a507a5bb4509be1ee626024cb45acbd57cb9d4032b", strip_prefix = "re2-2024-07-02", urls = ["https://github.com/google/re2/releases/download/2024-07-02/re2-2024-07-02.tar.gz"], ) - if not native.existing_rule("com_google_absl"): + if not native.existing_rule("abseil-cpp"): http_archive( - name = "com_google_absl", + name = "abseil-cpp", sha256 = "733726b8c3a6d39a4120d7e45ea8b41a434cdacde401cba500f14236c49b39dc", strip_prefix = "abseil-cpp-20240116.2", urls = ["https://github.com/abseil/abseil-cpp/releases/download/20240116.2/abseil-cpp-20240116.2.tar.gz"], diff --git a/third_party/openexr/.github/workflows/ci_workflow.yml b/third_party/openexr/.github/workflows/ci_workflow.yml index dc6b67f3..e775a9fb 100644 --- a/third_party/openexr/.github/workflows/ci_workflow.yml +++ b/third_party/openexr/.github/workflows/ci_workflow.yml @@ -222,6 +222,8 @@ jobs: env: CXX: ${{ matrix.cxx-compiler }} CC: ${{ matrix.cc-compiler }} + ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION: node16 + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true steps: - name: Checkout uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 @@ -230,8 +232,6 @@ jobs: mkdir _install mkdir _build mkdir _examples - - name: Install help2man - run: yum install -y help2man - name: Configure run: | cmake -B _build -S . 
\ @@ -242,7 +242,7 @@ jobs: -DCMAKE_VERBOSE_MAKEFILE:BOOL='OFF' \ -DBUILD_SHARED_LIBS=${{ matrix.build-shared }} \ -DOPENEXR_INSTALL_TOOLS='ON' \ - -DOPENEXR_INSTALL_DOCS='ON' \ + -DOPENEXR_INSTALL_DOCS='OFF' \ -DOPENEXR_RUN_FUZZ_TESTS='OFF' \ -DOPENEXR_ENABLE_THREADING=${{ matrix.threads-enabled }} - name: Build @@ -253,21 +253,6 @@ jobs: - name: Validate run: | share/ci/scripts/linux/validate_openexr_libs.sh _install - - name: Examples - run: | - # Make sure we can build the examples when configured as a - # standalone application linking against the just-installed - # OpenEXR library. - cmake ../src/examples \ - -DCMAKE_PREFIX_PATH=../../_install \ - -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} \ - -DCMAKE_CXX_STANDARD=${{ matrix.cxx-standard }} \ - -DCMAKE_CXX_FLAGS=${{ matrix.cxx-flags }} - cmake --build . \ - --config ${{ matrix.build-type }} - # Confirm the examples program runs - ./bin/OpenEXRExamples - working-directory: _examples - name: Test run: | ctest -T Test ${{ matrix.exclude-tests }} \ diff --git a/third_party/openexr/.github/workflows/codeql.yml b/third_party/openexr/.github/workflows/codeql.yml index 39893fb0..6b3fb66c 100644 --- a/third_party/openexr/.github/workflows/codeql.yml +++ b/third_party/openexr/.github/workflows/codeql.yml @@ -59,8 +59,14 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + # cary: Pin the version to the SHA for 2.18.0, since there appears to + # be a problem with 2.18.1 leading to a "No space left on + # device" failure + uses: github/codeql-action/init@5cf07d8b700b67e235fbb65cbc84f69c0cf10464 with: + # cary: the "linked" setting is necessary to force the run to pick up + # the version specified in the action. + tools: linked languages: ${{ matrix.language }} build-mode: ${{ matrix.build-mode }} # If you wish to specify custom queries, you can do so here or in a config file. 
@@ -86,6 +92,7 @@ jobs: exit 1 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + # Pin the version to the SHA for 2.18.0 + uses: github/codeql-action/analyze@5cf07d8b700b67e235fbb65cbc84f69c0cf10464 with: category: "/language:${{matrix.language}}" diff --git a/third_party/openexr/.github/workflows/ossfuzz_workflow.yml b/third_party/openexr/.github/workflows/ossfuzz_workflow.yml index 390cfe43..5854e2bb 100644 --- a/third_party/openexr/.github/workflows/ossfuzz_workflow.yml +++ b/third_party/openexr/.github/workflows/ossfuzz_workflow.yml @@ -48,7 +48,7 @@ jobs: dry-run: false language: c++ - name: Upload Crash - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 if: failure() && steps.build.outcome == 'success' with: name: artifacts diff --git a/third_party/openexr/.github/workflows/python-wheels-publish-test.yml b/third_party/openexr/.github/workflows/python-wheels-publish-test.yml index b88ac866..214bedb6 100644 --- a/third_party/openexr/.github/workflows/python-wheels-publish-test.yml +++ b/third_party/openexr/.github/workflows/python-wheels-publish-test.yml @@ -36,7 +36,7 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' @@ -48,7 +48,7 @@ jobs: run: pipx run build --sdist . 
--outdir wheelhouse - name: Build wheel - uses: pypa/cibuildwheel@ba8be0d98853f5744f24e7f902c8adef7ae2e7f3 # v2.18.1 + uses: pypa/cibuildwheel@bd033a44476646b606efccdd5eed92d5ea1d77ad # v2.20.0 with: output-dir: wheelhouse env: @@ -62,7 +62,7 @@ jobs: CIBW_ENVIRONMENT: OPENEXR_RELEASE_CANDIDATE_TAG="${{ github.ref_name }}" - name: Upload artifact - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: name: wheels-${{ matrix.os }} path: | @@ -84,21 +84,21 @@ jobs: steps: - name: Download Linux artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-ubuntu-latest path: dist - name: Download macOS artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-macos-latest path: dist - name: Download Windows artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-windows-latest path: dist - name: Publish distribution 📦 to TestPyPI - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # release/v1 + uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # release/v1 with: repository-url: https://test.pypi.org/legacy/ \ No newline at end of file diff --git a/third_party/openexr/.github/workflows/python-wheels-publish.yml b/third_party/openexr/.github/workflows/python-wheels-publish.yml index 5f5e4041..061c066c 100644 --- a/third_party/openexr/.github/workflows/python-wheels-publish.yml +++ b/third_party/openexr/.github/workflows/python-wheels-publish.yml @@ -32,7 +32,7 @@ jobs: uses: 
actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' @@ -42,7 +42,7 @@ jobs: run: pipx run build --sdist . --outdir wheelhouse - name: Build wheel - uses: pypa/cibuildwheel@ba8be0d98853f5744f24e7f902c8adef7ae2e7f3 # v2.18.1 + uses: pypa/cibuildwheel@bd033a44476646b606efccdd5eed92d5ea1d77ad # v2.20.0 with: output-dir: wheelhouse env: @@ -56,7 +56,7 @@ jobs: CIBW_TEST_SKIP: "*arm64" - name: Upload artifact - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: name: wheels-${{ matrix.os }} path: | @@ -78,19 +78,19 @@ jobs: steps: - name: Download Linux artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-ubuntu-latest path: dist - name: Download macOS artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-macos-latest path: dist - name: Download Windows artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: wheels-windows-latest path: dist - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # release/v1 + uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # release/v1 diff --git a/third_party/openexr/.github/workflows/python-wheels.yml b/third_party/openexr/.github/workflows/python-wheels.yml index 371bb82e..7f479df0 100644 --- 
a/third_party/openexr/.github/workflows/python-wheels.yml +++ b/third_party/openexr/.github/workflows/python-wheels.yml @@ -45,7 +45,7 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' @@ -55,7 +55,7 @@ jobs: run: pipx run build --sdist . --outdir wheelhouse - name: Build wheel - uses: pypa/cibuildwheel@ba8be0d98853f5744f24e7f902c8adef7ae2e7f3 # v2.18.1 + uses: pypa/cibuildwheel@bd033a44476646b606efccdd5eed92d5ea1d77ad # v2.20.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 # Skip python 3.6 since scikit-build-core requires 3.7+ @@ -65,7 +65,7 @@ jobs: CIBW_TEST_SKIP: "*-macosx*arm64" - name: Upload artifact - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: name: wheels-${{ matrix.os }} path: | diff --git a/third_party/openexr/.github/workflows/release-sign.yml b/third_party/openexr/.github/workflows/release-sign.yml index fd17ba13..515702ca 100644 --- a/third_party/openexr/.github/workflows/release-sign.yml +++ b/third_party/openexr/.github/workflows/release-sign.yml @@ -55,7 +55,7 @@ jobs: run: git archive --format=tar.gz -o ${OPENEXR_TARBALL} --prefix ${OPENEXR_PREFIX} ${TAG} - name: Sign archive with Sigstore - uses: sigstore/gh-action-sigstore-python@61f6a500bbfdd9a2a339cf033e5421951fbc1cd2 # v2.1.1 + uses: sigstore/gh-action-sigstore-python@f514d46b907ebcd5bedc05145c03b69c1edd8b46 # v3.0.0 with: inputs: ${{ env.OPENEXR_TARBALL }} diff --git a/third_party/openexr/.github/workflows/scorecard.yml b/third_party/openexr/.github/workflows/scorecard.yml index 86516297..1ff3992b 100644 --- a/third_party/openexr/.github/workflows/scorecard.yml +++ b/third_party/openexr/.github/workflows/scorecard.yml @@ -30,7 
+30,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3 + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif results_format: sarif @@ -43,7 +43,7 @@ jobs: # Upload the results as artifacts (optional) - name: "Upload artifact" - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: name: SARIF file path: results.sarif diff --git a/third_party/openexr/.github/workflows/snyk-scan-cron.yml b/third_party/openexr/.github/workflows/snyk-scan-cron.yml index 143cb73d..0daeeb7b 100644 --- a/third_party/openexr/.github/workflows/snyk-scan-cron.yml +++ b/third_party/openexr/.github/workflows/snyk-scan-cron.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: snyk/actions/setup@8349f9043a8b7f0f3ee8885bf28f0b388d2446e8 # master + - uses: snyk/actions/setup@ae9442546152ba9bb0a1c85e2672112c97e7a06d # master id: snyk - name: Snyk version diff --git a/third_party/openexr/MODULE.bazel b/third_party/openexr/MODULE.bazel index 2cf73ce4..5a189510 100644 --- a/third_party/openexr/MODULE.bazel +++ b/third_party/openexr/MODULE.bazel @@ -8,5 +8,5 @@ module( bazel_dep(name = "bazel_skylib", version = "1.6.1") bazel_dep(name = "imath", version = "3.1.11") -bazel_dep(name = "libdeflate", version = "1.20.bcr.1") +bazel_dep(name = "libdeflate", version = "1.21") bazel_dep(name = "platforms", version = "0.0.10") diff --git a/third_party/openexr/share/ci/scripts/linux/validate_openexr_libs.sh b/third_party/openexr/share/ci/scripts/linux/validate_openexr_libs.sh index d8bfe7ac..967a7fa1 100755 --- a/third_party/openexr/share/ci/scripts/linux/validate_openexr_libs.sh +++ b/third_party/openexr/share/ci/scripts/linux/validate_openexr_libs.sh @@ -24,7 +24,11 @@ fi set -x 
BUILD_ROOT=$1 -SRC_ROOT=$2 +if [[ $# == "2" ]]; then + SRC_ROOT=$2 +else + SRC_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/../../../.." +fi # Locate OpenEXR.pc and set PKG_CONFIG_PATH accordingly diff --git a/third_party/openexr/src/bin/CMakeLists.txt b/third_party/openexr/src/bin/CMakeLists.txt index 99785681..c70a90ef 100644 --- a/third_party/openexr/src/bin/CMakeLists.txt +++ b/third_party/openexr/src/bin/CMakeLists.txt @@ -11,6 +11,7 @@ add_subdirectory( exr2aces ) add_subdirectory( exrheader ) add_subdirectory( exrinfo ) add_subdirectory( exrmaketiled ) +add_subdirectory( exrmetrics ) add_subdirectory( exrstdattr ) add_subdirectory( exrmakepreview ) add_subdirectory( exrenvmap ) diff --git a/third_party/openexr/src/bin/exrmetrics/CMakeLists.txt b/third_party/openexr/src/bin/exrmetrics/CMakeLists.txt new file mode 100644 index 00000000..8739e316 --- /dev/null +++ b/third_party/openexr/src/bin/exrmetrics/CMakeLists.txt @@ -0,0 +1,14 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) Contributors to the OpenEXR Project. + +add_executable(exrmetrics main.cpp exrmetrics.cpp) +target_link_libraries(exrmetrics OpenEXR::OpenEXR) +set_target_properties(exrmetrics PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" +) +if(OPENEXR_INSTALL_TOOLS) + install(TARGETS exrmetrics DESTINATION ${CMAKE_INSTALL_BINDIR}) +endif() +if(WIN32 AND BUILD_SHARED_LIBS) + target_compile_definitions(exrmetrics PRIVATE OPENEXR_DLL) +endif() diff --git a/third_party/openexr/src/bin/exrmetrics/exrmetrics.cpp b/third_party/openexr/src/bin/exrmetrics/exrmetrics.cpp new file mode 100644 index 00000000..b587d495 --- /dev/null +++ b/third_party/openexr/src/bin/exrmetrics/exrmetrics.cpp @@ -0,0 +1,586 @@ + +// +// SPDX-License-Identifier: BSD-3-Clause +// Copyright (c) Contributors to the OpenEXR Project. 
+// + +#include "exrmetrics.h" + +#include "ImfChannelList.h" +#include "ImfDeepFrameBuffer.h" +#include "ImfDeepScanLineInputPart.h" +#include "ImfDeepScanLineOutputPart.h" +#include "ImfDeepTiledInputPart.h" +#include "ImfDeepTiledOutputPart.h" +#include "ImfHeader.h" +#include "ImfInputPart.h" +#include "ImfMisc.h" +#include "ImfMultiPartInputFile.h" +#include "ImfMultiPartOutputFile.h" +#include "ImfOutputPart.h" +#include "ImfPartType.h" +#include "ImfTiledInputPart.h" +#include "ImfTiledMisc.h" +#include "ImfTiledOutputPart.h" + +#include +#include +#include +#include +#include +#include + +using namespace Imf; +using Imath::Box2i; + +using std::cerr; +using namespace std::chrono; +using std::chrono::steady_clock; +using std::cout; +using std::endl; +using std::list; +using std::runtime_error; +using std::string; +using std::to_string; +using std::vector; + +double +timing (steady_clock::time_point start, steady_clock::time_point end) +{ + return std::chrono::duration(end-start).count(); +} + +int +channelCount (const Header& h) +{ + int channels = 0; + for (ChannelList::ConstIterator i = h.channels ().begin (); + i != h.channels ().end (); + ++i) + { + ++channels; + } + return channels; +} + +void +copyScanLine (InputPart& in, OutputPart& out) +{ + Box2i dw = in.header ().dataWindow (); + uint64_t width = dw.max.x + 1 - dw.min.x; + uint64_t height = dw.max.y + 1 - dw.min.y; + uint64_t numPixels = width * height; + int numChans = channelCount (in.header ()); + + vector> pixelData (numChans); + uint64_t offsetToOrigin = width * static_cast (dw.min.y) + + static_cast (dw.min.x); + + int channelNumber = 0; + int pixelSize = 0; + FrameBuffer buf; + + for (ChannelList::ConstIterator i = out.header ().channels ().begin (); + i != out.header ().channels ().end (); + ++i) + { + int samplesize = pixelTypeSize (i.channel ().type); + pixelData[channelNumber].resize (numPixels * samplesize); + + buf.insert ( + i.name (), + Slice ( + i.channel ().type, + 
pixelData[channelNumber].data () - offsetToOrigin * samplesize, + samplesize, + samplesize * width)); + ++channelNumber; + pixelSize += samplesize; + } + + in.setFrameBuffer (buf); + out.setFrameBuffer (buf); + + steady_clock::time_point startRead = steady_clock::now(); + in.readPixels (dw.min.y, dw.max.y); + steady_clock::time_point endRead = steady_clock::now(); + + steady_clock::time_point startWrite = steady_clock::now(); + out.writePixels (height); + steady_clock::time_point endWrite = steady_clock::now(); + + cout << " \"read time\": " << timing (startRead, endRead) << ",\n"; + cout << " \"write time\": " << timing (startWrite, endWrite) << ",\n"; + cout << " \"pixel count\": " << numPixels << ",\n"; + cout << " \"raw size\": " << numPixels * pixelSize << ",\n"; +} + +void +copyTiled (TiledInputPart& in, TiledOutputPart& out) +{ + int numChans = channelCount (in.header ()); + TileDescription tiling = in.header ().tileDescription (); + + Box2i imageDw = in.header ().dataWindow (); + int totalLevels; + switch (tiling.mode) + { + case ONE_LEVEL: totalLevels = 1; //break; + case MIPMAP_LEVELS: totalLevels = in.numLevels (); break; + case RIPMAP_LEVELS: + totalLevels = in.numXLevels () * in.numYLevels (); + break; + case NUM_LEVELMODES: throw runtime_error ("unknown tile mode"); + } + + vector>> pixelData (totalLevels); + vector frameBuffer (totalLevels); + + int levelIndex = 0; + int pixelSize = 0; + size_t totalPixels = 0; + + // + // allocate memory and initialize frameBuffer for each level + // + for (int xLevel = 0; xLevel < in.numXLevels (); ++xLevel) + { + for (int yLevel = 0; yLevel < in.numYLevels (); ++yLevel) + { + if (tiling.mode == RIPMAP_LEVELS || xLevel == yLevel) + { + Box2i dw = dataWindowForLevel ( + tiling, + imageDw.min.x, + imageDw.max.x, + imageDw.min.y, + imageDw.max.y, + xLevel, + yLevel); + uint64_t width = dw.max.x + 1 - dw.min.x; + uint64_t height = dw.max.y + 1 - dw.min.y; + uint64_t numPixels = width * height; + uint64_t offsetToOrigin 
= + width * static_cast (dw.min.y) + + static_cast (dw.min.x); + int channelNumber = 0; + pixelSize = 0; + + pixelData[levelIndex].resize (numChans); + + for (ChannelList::ConstIterator i = + out.header ().channels ().begin (); + i != out.header ().channels ().end (); + ++i) + { + int samplesize = pixelTypeSize (i.channel ().type); + pixelData[levelIndex][channelNumber].resize ( + numPixels * samplesize); + + frameBuffer[levelIndex].insert ( + i.name (), + Slice ( + i.channel ().type, + pixelData[levelIndex][channelNumber].data () - + offsetToOrigin * samplesize, + samplesize, + samplesize * width)); + ++channelNumber; + pixelSize += samplesize; + } + totalPixels += numPixels; + ++levelIndex; + } + } + } + + steady_clock::time_point startRead = steady_clock::now(); + levelIndex = 0; + + for (int xLevel = 0; xLevel < in.numXLevels (); ++xLevel) + { + for (int yLevel = 0; yLevel < in.numYLevels (); ++yLevel) + { + if (tiling.mode == RIPMAP_LEVELS || xLevel == yLevel) + { + in.setFrameBuffer (frameBuffer[levelIndex]); + in.readTiles ( + 0, + in.numXTiles (xLevel) - 1, + 0, + in.numYTiles (yLevel) - 1, + xLevel, + yLevel); + ++levelIndex; + } + } + } + + steady_clock::time_point endRead = steady_clock::now(); + + steady_clock::time_point startWrite = steady_clock::now(); + levelIndex = 0; + int tileCount = 0; + + for (int xLevel = 0; xLevel < in.numXLevels (); ++xLevel) + { + for (int yLevel = 0; yLevel < in.numYLevels (); ++yLevel) + { + if (tiling.mode == RIPMAP_LEVELS || xLevel == yLevel) + { + out.setFrameBuffer (frameBuffer[levelIndex]); + out.writeTiles ( + 0, + in.numXTiles (xLevel) - 1, + 0, + in.numYTiles (yLevel) - 1, + xLevel, + yLevel); + tileCount += in.numXTiles (xLevel) * in.numYTiles (yLevel); + ++levelIndex; + } + } + } + steady_clock::time_point endWrite = steady_clock::now(); + + cout << " \"read time\": " << timing (startRead, endRead) << ",\n"; + cout << " \"write time\": " << timing (startWrite, endWrite) << ",\n"; + cout << " \"total tiles\": " 
<< tileCount << ",\n"; + cout << " \"pixel count\": " << totalPixels << ",\n"; + cout << " \"raw size\": " << totalPixels * pixelSize << ",\n"; +} + +void +copyDeepScanLine (DeepScanLineInputPart& in, DeepScanLineOutputPart& out) +{ + Box2i dw = in.header ().dataWindow (); + uint64_t width = dw.max.x + 1 - dw.min.x; + uint64_t height = dw.max.y + 1 - dw.min.y; + uint64_t numPixels = width * height; + int numChans = channelCount (in.header ()); + vector sampleCount (numPixels); + + uint64_t offsetToOrigin = width * static_cast (dw.min.y) + + static_cast (dw.min.x); + vector> pixelPtrs (numChans); + + DeepFrameBuffer buffer; + + buffer.insertSampleCountSlice (Slice ( + UINT, + (char*) (sampleCount.data () - offsetToOrigin), + sizeof (int), + sizeof (int) * width)); + int channelNumber = 0; + int bytesPerSample = 0; + for (ChannelList::ConstIterator i = out.header ().channels ().begin (); + i != out.header ().channels ().end (); + ++i) + { + pixelPtrs[channelNumber].resize (numPixels); + int samplesize = pixelTypeSize (i.channel ().type); + buffer.insert ( + i.name (), + DeepSlice ( + i.channel ().type, + (char*) (pixelPtrs[channelNumber].data () - offsetToOrigin), + sizeof (char*), + sizeof (char*) * width, + samplesize)); + ++channelNumber; + bytesPerSample += samplesize; + } + + in.setFrameBuffer (buffer); + out.setFrameBuffer (buffer); + + steady_clock::time_point startCountRead = steady_clock::now(); + in.readPixelSampleCounts (dw.min.y, dw.max.y); + steady_clock::time_point endCountRead = steady_clock::now(); + + size_t totalSamples = 0; + + for (int i: sampleCount) + { + totalSamples += i; + } + + vector> sampleData (numChans); + channelNumber = 0; + for (ChannelList::ConstIterator i = in.header ().channels ().begin (); + i != in.header ().channels ().end (); + ++i) + { + int samplesize = pixelTypeSize (i.channel ().type); + sampleData[channelNumber].resize (samplesize * totalSamples); + int offset = 0; + for (int p = 0; p < numPixels; ++p) + { + 
pixelPtrs[channelNumber][p] = + sampleData[channelNumber].data () + offset * samplesize; + offset += sampleCount[p]; + } + + ++channelNumber; + } + + steady_clock::time_point startSampleRead = steady_clock::now(); + in.readPixels (dw.min.y, dw.max.y); + steady_clock::time_point endSampleRead = steady_clock::now(); + + + steady_clock::time_point startWrite = steady_clock::now(); + out.writePixels (height); + steady_clock::time_point endWrite = steady_clock::now(); + + + cout << " \"count read time\": " << timing (startCountRead, endCountRead) + << ",\n"; + cout << " \"sample read time\": " + << timing (startSampleRead, endSampleRead) << ",\n"; + cout << " \"write time\": " << timing (startWrite, endWrite) << ",\n"; + cout << " \"pixel count\": " << numPixels << ",\n"; + cout << " \"raw size\": " + << totalSamples * bytesPerSample + numPixels * sizeof (int) << ",\n"; +} + +void +copyDeepTiled (DeepTiledInputPart& in, DeepTiledOutputPart& out) +{ + + TileDescription tiling = in.header ().tileDescription (); + + if (tiling.mode == MIPMAP_LEVELS) + { + throw runtime_error ( + "exrmetrics does not support mipmapped deep tiled parts"); + } + + if (tiling.mode == RIPMAP_LEVELS) + { + throw runtime_error ( + "exrmetrics does not support ripmapped deep tiled parts"); + } + + Box2i dw = in.header ().dataWindow (); + uint64_t width = dw.max.x + 1 - dw.min.x; + uint64_t height = dw.max.y + 1 - dw.min.y; + uint64_t numPixels = width * height; + int numChans = channelCount (in.header ()); + vector sampleCount (numPixels); + + uint64_t offsetToOrigin = width * static_cast (dw.min.y) + + static_cast (dw.min.x); + vector> pixelPtrs (numChans); + + DeepFrameBuffer buffer; + + buffer.insertSampleCountSlice (Slice ( + UINT, + (char*) (sampleCount.data () - offsetToOrigin), + sizeof (int), + sizeof (int) * width)); + int channelNumber = 0; + int bytesPerSample = 0; + for (ChannelList::ConstIterator i = out.header ().channels ().begin (); + i != out.header ().channels ().end (); + ++i) + 
{ + pixelPtrs[channelNumber].resize (numPixels); + int samplesize = pixelTypeSize (i.channel ().type); + buffer.insert ( + i.name (), + DeepSlice ( + i.channel ().type, + (char*) (pixelPtrs[channelNumber].data () - offsetToOrigin), + sizeof (char*), + sizeof (char*) * width, + samplesize)); + ++channelNumber; + bytesPerSample += samplesize; + } + + in.setFrameBuffer (buffer); + out.setFrameBuffer (buffer); + + steady_clock::time_point startCountRead = steady_clock::now(); + + in.readPixelSampleCounts ( + 0, in.numXTiles (0) - 1, 0, in.numYTiles (0) - 1, 0, 0); + steady_clock::time_point endCountRead = steady_clock::now(); + + + size_t totalSamples = 0; + + for (int i: sampleCount) + { + totalSamples += i; + } + + vector> sampleData (numChans); + channelNumber = 0; + for (ChannelList::ConstIterator i = in.header ().channels ().begin (); + i != in.header ().channels ().end (); + ++i) + { + int samplesize = pixelTypeSize (i.channel ().type); + sampleData[channelNumber].resize (samplesize * totalSamples); + int offset = 0; + for (int p = 0; p < numPixels; ++p) + { + pixelPtrs[channelNumber][p] = + sampleData[channelNumber].data () + offset * samplesize; + offset += sampleCount[p]; + } + + ++channelNumber; + } + + steady_clock::time_point startSampleRead = steady_clock::now(); + in.readTiles (0, in.numXTiles (0) - 1, 0, in.numYTiles (0) - 1, 0, 0); + steady_clock::time_point endSampleRead = steady_clock::now(); + + steady_clock::time_point startWrite = steady_clock::now(); + out.writeTiles (0, in.numXTiles (0) - 1, 0, in.numYTiles (0) - 1, 0, 0); + steady_clock::time_point endWrite = steady_clock::now(); + + + cout << " \"count read time\": " << timing (startCountRead, endCountRead) + << ",\n"; + cout << " \"sample read time\": " + << timing (startSampleRead, endSampleRead) << ",\n"; + cout << " \"write time\": " << timing (startWrite, endWrite) << ",\n"; + cout << " \"pixel count\": " << numPixels << ",\n"; + cout << " \"raw size\": " + << totalSamples * bytesPerSample 
+ numPixels * sizeof (int) << ",\n"; +} + +void +exrmetrics ( + const char inFileName[], + const char outFileName[], + int part, + Imf::Compression compression, + float level, + int halfMode) +{ + MultiPartInputFile in (inFileName); + if (part >= in.parts ()) + { + throw runtime_error ((string (inFileName) + " only contains " + + to_string (in.parts ()) + + " parts. Cannot copy part " + to_string (part)) + .c_str ()); + } + Header outHeader = in.header (part); + + if (compression < NUM_COMPRESSION_METHODS) + { + outHeader.compression () = compression; + } + else { compression = outHeader.compression (); } + + if (!isinf (level) && level >= -1) + { + switch (outHeader.compression ()) + { + case DWAA_COMPRESSION: + case DWAB_COMPRESSION: + outHeader.dwaCompressionLevel () = level; + break; + case ZIP_COMPRESSION: + case ZIPS_COMPRESSION: + outHeader.zipCompressionLevel () = level; + break; + // case ZSTD_COMPRESSION : + // outHeader.zstdCompressionLevel()=level; + // break; + default: + throw runtime_error ( + "-l option only works for DWAA/DWAB,ZIP/ZIPS or ZSTD compression"); + } + } + + if (halfMode > 0) + { + for (ChannelList::Iterator i = outHeader.channels ().begin (); + i != outHeader.channels ().end (); + ++i) + { + if (halfMode == 2 || !strcmp (i.name (), "R") || + !strcmp (i.name (), "G") || !strcmp (i.name (), "B") || + !strcmp (i.name (), "A")) + { + i.channel ().type = HALF; + } + } + } + + string inCompress, outCompress; + getCompressionNameFromId (in.header (part).compression (), inCompress); + getCompressionNameFromId (outHeader.compression (), outCompress); + cout << "{\n"; + cout << " \"input compression\": \"" << inCompress << "\",\n"; + cout << " \"output compression\": \"" << outCompress << "\",\n"; + if (compression == ZIP_COMPRESSION || compression == ZIPS_COMPRESSION) + { + cout << " \"zipCompressionLevel\": " + << outHeader.zipCompressionLevel () << ",\n"; + } + + if (compression == DWAA_COMPRESSION || compression == DWAB_COMPRESSION) + { + 
cout << " \"dwaCompressionLevel\": " + << outHeader.dwaCompressionLevel () << ",\n"; + } + + std::string type = outHeader.type (); + cout << " \"part type\": \"" << type << "\",\n"; + + if (type == SCANLINEIMAGE) + { + cout << " \"scanlines per chunk:\" : " + << getCompressionNumScanlines (compression) << ",\n"; + } + + { + MultiPartOutputFile out (outFileName, &outHeader, 1); + + if (type == TILEDIMAGE) + { + TiledInputPart inpart (in, part); + TiledOutputPart outpart (out, 0); + copyTiled (inpart, outpart); + } + else if (type == SCANLINEIMAGE) + { + InputPart inpart (in, part); + OutputPart outpart (out, 0); + copyScanLine (inpart, outpart); + } + else if (type == DEEPSCANLINE) + { + DeepScanLineInputPart inpart (in, part); + DeepScanLineOutputPart outpart (out, 0); + copyDeepScanLine (inpart, outpart); + } + else if (type == DEEPTILE) + { + DeepTiledInputPart inpart (in, part); + DeepTiledOutputPart outpart (out, 0); + copyDeepTiled (inpart, outpart); + } + else + { + throw runtime_error ( + (inFileName + string (" contains unknown part type ") + type) + .c_str ()); + } + } + struct stat instats, outstats; + stat (inFileName, &instats); + stat (outFileName, &outstats); + cout << " \"input file size\": " << instats.st_size << ",\n"; + cout << " \"output file size\": " << outstats.st_size << "\n"; + cout << "}\n"; +} diff --git a/third_party/openexr/src/bin/exrmetrics/exrmetrics.h b/third_party/openexr/src/bin/exrmetrics/exrmetrics.h new file mode 100644 index 00000000..8a97085d --- /dev/null +++ b/third_party/openexr/src/bin/exrmetrics/exrmetrics.h @@ -0,0 +1,20 @@ + +#ifndef INCLUDED_EXR_METRICS_H +#define INCLUDED_EXR_METRICS_H + +//---------------------------------------------------------------------------- +// +// Copy input to output, reporting file size and timing +// +//---------------------------------------------------------------------------- + +#include "ImfCompression.h" + +void exrmetrics ( + const char inFileName[], + const char outFileName[], + 
int part, + Imf::Compression compression, + float level, + int halfMode); +#endif diff --git a/third_party/openexr/src/bin/exrmetrics/main.cpp b/third_party/openexr/src/bin/exrmetrics/main.cpp new file mode 100644 index 00000000..a7d0a13b --- /dev/null +++ b/third_party/openexr/src/bin/exrmetrics/main.cpp @@ -0,0 +1,199 @@ + +// +// SPDX-License-Identifier: BSD-3-Clause +// Copyright (c) Contributors to the OpenEXR Project. +// + +#include "exrmetrics.h" + +#include "ImfCompression.h" +#include "ImfMisc.h" + +#include +#include + +#include +#include +#include + +using std::cerr; +using std::cout; +using std::endl; +using std::ostream; +using std::vector; +using namespace Imf; + +void +usageMessage (ostream& stream, const char* program_name, bool verbose = false) +{ + stream << "Usage: " << program_name << " [options] infile outfile" << endl; + + if (verbose) + { + std::string compressionNames; + getCompressionNamesString ("/", compressionNames); + stream + << "Read an OpenEXR image from infile, write an identical copy to outfile" + " reporting time taken to read/write and file sizes.\n" + "\n" + "Options:\n" + "\n" + " -p n part number to copy (only one part will be written to output file)\n" + " default is part 0\n" + "\n" + " -l level set DWA or ZIP compression level\n" + "\n" + " -z x sets the data compression method to x\n" + " (" + << compressionNames.c_str () + << ",\n" + " default retains original method)\n" + "\n" + " -16 rgba|all force 16 bit half float: either just RGBA, or all channels\n" + " default retains original type for all channels\n" + "\n" + " -h, --help print this message\n" + "\n" + " --version print version information\n" + "\n"; + } +} + +int +main (int argc, char** argv) +{ + + const char* outFile = nullptr; + const char* inFile = nullptr; + int part = 0; + float level = INFINITY; + int halfMode = 0; // 0 - leave alone, 1 - just RGBA, 2 - everything + Compression compression = Compression::NUM_COMPRESSION_METHODS; + + int i = 1; + + if 
(argc == 1) + { + usageMessage (cerr, "exrmetrics", true); + return 1; + } + + while (i < argc) + { + if (!strcmp (argv[i], "-h") || !strcmp (argv[i], "--help")) + { + usageMessage (cout, "exrmetrics", true); + return 0; + } + + else if (!strcmp (argv[i], "--version")) + { + const char* libraryVersion = getLibraryVersion (); + + cout << "exrmetrics (OpenEXR) " << OPENEXR_VERSION_STRING; + if (strcmp (libraryVersion, OPENEXR_VERSION_STRING)) + cout << "(OpenEXR version " << libraryVersion << ")"; + cout << " https://openexr.com" << endl; + cout << "Copyright (c) Contributors to the OpenEXR Project" << endl; + cout << "License BSD-3-Clause" << endl; + return 0; + } + else if (!strcmp (argv[i], "-z")) + { + if (i > argc - 2) + { + cerr << "Missing compression value with -z option\n"; + return 1; + } + + getCompressionIdFromName (argv[i + 1], compression); + if (compression == Compression::NUM_COMPRESSION_METHODS) + { + cerr << "unknown compression type " << argv[i + 1] << endl; + return 1; + } + i += 2; + } + else if (!strcmp (argv[i], "-p")) + { + if (i > argc - 2) + { + cerr << "Missing part number with -p option\n"; + return 1; + } + part = atoi (argv[i + 1]); + if (part < 0) + { + cerr << "bad part " << part << " specified to -p option\n"; + return 1; + } + + i += 2; + } + else if (!strcmp (argv[i], "-l")) + { + if (i > argc - 2) + { + cerr << "Missing compression level number with -l option\n"; + return 1; + } + level = atof (argv[i + 1]); + if (level < 0) + { + cerr << "bad level " << level << " specified to -l option\n"; + return 1; + } + + i += 2; + } + else if (!strcmp (argv[i], "-16")) + { + if (i > argc - 2) + { + cerr << "Missing mode with -16 option\n"; + return 1; + } + if (!strcmp (argv[i + 1], "all")) { halfMode = 2; } + else if (!strcmp (argv[i + 1], "rgba")) { halfMode = 1; } + else + { + cerr << " bad mode for -16 option: must be 'all' or 'rgba'\n"; + return 1; + } + i += 2; + } + else if (!inFile) + { + inFile = argv[i]; + i += 1; + } + else if 
(!outFile) + { + outFile = argv[i]; + i += 1; + } + else + { + cerr << "unknown argument or extra filename specified\n"; + usageMessage (cerr, "exrmetrics", false); + return 1; + } + } + if (!inFile || !outFile) + { + cerr << "Missing input or output file\n"; + usageMessage (cerr, "exrmetrics", false); + return 1; + } + + try + { + exrmetrics (inFile, outFile, part, compression, level, halfMode); + } + catch (std::exception& what) + { + cerr << "error from exrmetrics: " << what.what () << endl; + return 1; + } + return 0; +} diff --git a/third_party/openexr/src/test/bin/CMakeLists.txt b/third_party/openexr/src/test/bin/CMakeLists.txt index 32a204ad..49c89928 100644 --- a/third_party/openexr/src/test/bin/CMakeLists.txt +++ b/third_party/openexr/src/test/bin/CMakeLists.txt @@ -67,6 +67,7 @@ if(BUILD_TESTING) exrmultiview exrmultipart exrstdattr + exrmetrics ) foreach(test ${tests}) diff --git a/third_party/openexr/src/test/bin/test_exrmetrics.py b/third_party/openexr/src/test/bin/test_exrmetrics.py new file mode 100644 index 00000000..d6f5b14a --- /dev/null +++ b/third_party/openexr/src/test/bin/test_exrmetrics.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) Contributors to the OpenEXR Project. 
+ +import sys, os, tempfile, atexit, json +from subprocess import PIPE, run + +print(f"testing exrmetrics: {' '.join(sys.argv)}") + +exrmetrics = sys.argv[1] +image_dir = sys.argv[3] +version = sys.argv[4] + +assert(os.path.isfile(exrmetrics)), "\nMissing " + exrmetrics +assert(os.path.isdir(image_dir)), "\nMissing " + image_dir + +fd, outimage = tempfile.mkstemp(".exr") +os.close(fd) + +def cleanup(): + print(f"deleting {outimage}") +atexit.register(cleanup) + +# no args = usage message +result = run ([exrmetrics], stdout=PIPE, stderr=PIPE, universal_newlines=True) +print(" ".join(result.args)) +assert(result.returncode != 0), "\n"+result.stderr +assert(result.stderr.startswith ("Usage: ")), "\n"+result.stderr + +# -h = usage message +result = run ([exrmetrics, "-h"], stdout=PIPE, stderr=PIPE, universal_newlines=True) +print(" ".join(result.args)) +assert(result.returncode == 0), "\n"+result.stderr +assert(result.stdout.startswith ("Usage: ")), "\n"+result.stdout + +result = run ([exrmetrics, "--help"], stdout=PIPE, stderr=PIPE, universal_newlines=True) +print(" ".join(result.args)) +assert(result.returncode == 0), "\n"+result.stderr +assert(result.stdout.startswith ("Usage: ")), "\n"+result.stdout + +# --version +result = run ([exrmetrics, "--version"], stdout=PIPE, stderr=PIPE, universal_newlines=True) +print(" ".join(result.args)) +assert(result.returncode == 0), "\n"+result.stderr +assert(result.stdout.startswith ("exrmetrics")), "\n"+result.stdout +assert(version in result.stdout), "\n"+result.stdout + +# test missing arguments, using just the -option but no value + +for a in ["-p","-l","-16","-z"]: + result = run ([exrmetrics, a], stdout=PIPE, stderr=PIPE, universal_newlines=True) + print(" ".join(result.args)) + print(result.stderr) + assert(result.returncode != 0), "\n"+result.stderr + +command = [exrmetrics] +image = f"{image_dir}/TestImages/GrayRampsHorizontal.exr" +command += [image, outimage] + +result = run (command, stdout=PIPE, stderr=PIPE, 
universal_newlines=True) +print(" ".join(result.args)) +print(result.returncode) +print(result.stdout) +print(result.stderr) +assert(result.returncode == 0), "\n"+result.stderr +assert(os.path.isfile(outimage)), "\nMissing " + outimage + +# confirm data is valid JSON (will not be true if filename contains quotes) +data = json.loads(result.stdout) +for x in ['write time','output file size','input file size']: + assert(x in data),"\n Missing field "+x + +print("success") diff --git a/third_party/openexr/src/wrappers/python/Imath.py b/third_party/openexr/src/wrappers/python/Imath.py index 3f551e37..a3fccc26 100644 --- a/third_party/openexr/src/wrappers/python/Imath.py +++ b/third_party/openexr/src/wrappers/python/Imath.py @@ -164,12 +164,18 @@ class Channel: >>> import Imath >>> print Imath.Channel(Imath.PixelType(Imath.PixelType.FLOAT), 4, 4) FLOAT (4, 4) + >>> print Imath.Channel(Imath.PixelType.FLOAT, 4, 4) + Traceback (most recent call last): + ... + TypeError: type needs to be a PixelType. """ def __init__(self, type = PixelType(PixelType.HALF), xSampling = 1, ySampling = 1): self.type = type self.xSampling = xSampling self.ySampling = ySampling + if not isinstance(self.type, PixelType): + raise TypeError("type needs to be a PixelType.") def __repr__(self): return repr(self.type) + " " + repr((self.xSampling, self.ySampling)) def __eq__(self, other): diff --git a/third_party/openexr/src/wrappers/python/README.md b/third_party/openexr/src/wrappers/python/README.md index 9d5df896..26d0e868 100644 --- a/third_party/openexr/src/wrappers/python/README.md +++ b/third_party/openexr/src/wrappers/python/README.md @@ -74,17 +74,18 @@ for more information. 
The "hello, world" image writer: - import OpenEXR + import OpenEXR, Imath + from array import array width = 10 height = 10 size = width * height h = OpenEXR.Header(width,height) - h['channels'] = {'R' : Imath.Channel(FLOAT), - 'G' : Imath.Channel(FLOAT), - 'B' : Imath.Channel(FLOAT), - 'A' : Imath.Channel(FLOAT)} + h['channels'] = {'R' : Imath.Channel(Imath.PixelType(Imath.PixelType.FLOAT)), + 'G' : Imath.Channel(Imath.PixelType(Imath.PixelType.FLOAT)), + 'B' : Imath.Channel(Imath.PixelType(Imath.PixelType.FLOAT)), + 'A' : Imath.Channel(Imath.PixelType(Imath.PixelType.FLOAT))} o = OpenEXR.OutputFile("hello.exr", h) r = array('f', [n for n in range(size*0,size*1)]).tobytes() g = array('f', [n for n in range(size*1,size*2)]).tobytes() diff --git a/third_party/openexr/src/wrappers/python/tests/test_unittest.py b/third_party/openexr/src/wrappers/python/tests/test_unittest.py index e08a44b0..96f06d33 100644 --- a/third_party/openexr/src/wrappers/python/tests/test_unittest.py +++ b/third_party/openexr/src/wrappers/python/tests/test_unittest.py @@ -197,6 +197,13 @@ def test_invalid_pixeltype(): else: assert 0 + try: + Imath.Channel(FLOAT) + except: + pass + else: + assert 0 + print("invalid pixeltype ok") testList.append(("test_invalid_pixeltype", test_invalid_pixeltype)) diff --git a/third_party/rules_pkg-0.9.1/MODULE.bazel b/third_party/rules_pkg-0.9.1/MODULE.bazel deleted file mode 100644 index cffbf62a..00000000 --- a/third_party/rules_pkg-0.9.1/MODULE.bazel +++ /dev/null @@ -1,15 +0,0 @@ -module( - name = "rules_pkg", - version = "0.9.1", # Must sync with version.bzl. - repo_name = "rules_pkg", - compatibility_level = 1, -) - -# Do not update to newer versions until you need a specific new feature. 
-bazel_dep(name = "rules_license", version = "0.0.4") -bazel_dep(name = "bazel_skylib", version = "1.2.0") -bazel_dep(name = "rules_python", version = "0.10.2") - -# Only for development -bazel_dep(name = "platforms", version = "0.0.5", dev_dependency = True) -bazel_dep(name = "stardoc", version = "0.5.3", dev_dependency = True) diff --git a/third_party/rules_pkg-0.9.1/install.bzl b/third_party/rules_pkg-0.9.1/install.bzl deleted file mode 100644 index 9106a44f..00000000 --- a/third_party/rules_pkg-0.9.1/install.bzl +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2021 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load( - "//pkg:install.bzl", - _pkg_install = "pkg_install" -) - -pkg_install = _pkg_install diff --git a/third_party/rules_pkg-0.9.1/package_variables.bzl b/third_party/rules_pkg-0.9.1/package_variables.bzl deleted file mode 100644 index fb77d07e..00000000 --- a/third_party/rules_pkg-0.9.1/package_variables.bzl +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2021 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("//pkg/package_variables.bzl", - _add_ctx_variables = "add_ctx_variables", -) -add_ctx_variables = _add_ctx_variables diff --git a/third_party/rules_pkg-0.9.1/providers.bzl b/third_party/rules_pkg-0.9.1/providers.bzl deleted file mode 100644 index 5ef7bc6e..00000000 --- a/third_party/rules_pkg-0.9.1/providers.bzl +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2021 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("//pkg:providers.bzl", - _PackageArtifactInfo = "PackageArtifactInfo", - _PackageDirsInfo = "PackageDirsInfo", - _PackageFilegroupInfo = "PackageFilegroupInfo", - _PackageFilesInfo = "PackageFilesInfo", - _PackageSymlinkInfo = "PackageSymlinkInfo", - _PackageVariablesInfo = "PackageVariablesInfo", -) - -PackageArtifactInfo = _PackageArtifactInfo -PackageDirsInfo = _PackageDirsInfo -PackageFilegroupInfo = _PackageFilegroupInfo -PackageFilesInfo = _PackageFilesInfo -PackageSymlinkInfo = _PackageSymlinkInfo -PackageVariablesInfo = _PackageVariablesInfo diff --git a/third_party/rules_pkg-0.9.1/rpm.bzl b/third_party/rules_pkg-0.9.1/rpm.bzl deleted file mode 100644 index c26ecef4..00000000 --- a/third_party/rules_pkg-0.9.1/rpm.bzl +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2021 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("//pkg:rpm.bzl", - _pkg_rpm = "pkg_rpm", -) -pkg_rpm = _pkg_rpm diff --git a/third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild_configure.bzl b/third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild_configure.bzl deleted file mode 100644 index f50a9ad8..00000000 --- a/third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild_configure.bzl +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2020 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Repository rule to autoconfigure a toolchain using the system rpmbuild.""" - -def _write_build(rctx, path, version): - if not path: - path = "" - rctx.template( - "BUILD", - Label("//toolchains/rpm:BUILD.tpl"), - substitutions = { - "{GENERATOR}": "@rules_pkg//toolchains/rpm/rpmbuild_configure.bzl%find_system_rpmbuild", - "{RPMBUILD_PATH}": str(path), - "{RPMBUILD_VERSION}": version, - }, - executable = False, - ) - -def _find_system_rpmbuild_impl(rctx): - rpmbuild_path = rctx.which("rpmbuild") - if rctx.attr.verbose: - if rpmbuild_path: - print("Found rpmbuild at '%s'" % rpmbuild_path) # buildifier: disable=print - else: - print("No system rpmbuild found.") # buildifier: disable=print - version = "unknown" - if rpmbuild_path: - res = rctx.execute([rpmbuild_path, "--version"]) - if res.return_code == 0: - # expect stdout like: RPM version 4.16.1.2 - parts = res.stdout.strip().split(" ") - if parts[0] == "RPM" and parts[1] == "version": - version = parts[2] - _write_build(rctx = rctx, path = rpmbuild_path, version = version) - -_find_system_rpmbuild = repository_rule( - implementation = _find_system_rpmbuild_impl, - doc = """Create a repository that defines an rpmbuild toolchain based on the system rpmbuild.""", - local = True, - environ = ["PATH"], - attrs = { - "verbose": attr.bool( - doc = "If true, print status messages.", - ), - }, -) - -def find_system_rpmbuild(name, verbose=False): - _find_system_rpmbuild(name=name, verbose=verbose) - native.register_toolchains( - "@%s//:rpmbuild_auto_toolchain" % name, - 
"@rules_pkg//toolchains/rpm:rpmbuild_missing_toolchain") diff --git a/third_party/rules_pkg-0.9.1/BUILD b/third_party/rules_pkg-1.0.1/BUILD similarity index 95% rename from third_party/rules_pkg-0.9.1/BUILD rename to third_party/rules_pkg-1.0.1/BUILD index 959335cc..5d05b041 100644 --- a/third_party/rules_pkg-0.9.1/BUILD +++ b/third_party/rules_pkg-1.0.1/BUILD @@ -22,7 +22,7 @@ package( license( name = "license", license_kinds = [ - "@rules_license//licenses/spdx:Apache-2.0" + "@rules_license//licenses/spdx:Apache-2.0", ], license_text = "LICENSE", ) diff --git a/third_party/rules_pkg-0.9.1/LICENSE b/third_party/rules_pkg-1.0.1/LICENSE similarity index 100% rename from third_party/rules_pkg-0.9.1/LICENSE rename to third_party/rules_pkg-1.0.1/LICENSE diff --git a/third_party/rules_pkg-1.0.1/MODULE.bazel b/third_party/rules_pkg-1.0.1/MODULE.bazel new file mode 100644 index 00000000..50f75909 --- /dev/null +++ b/third_party/rules_pkg-1.0.1/MODULE.bazel @@ -0,0 +1,25 @@ +module( + name = "rules_pkg", + version = "1.0.1", # Must sync with version.bzl. + compatibility_level = 1, + repo_name = "rules_pkg", +) + +# Do not update to newer versions until you need a specific new feature. +bazel_dep(name = "rules_license", version = "0.0.7") +bazel_dep(name = "rules_python", version = "0.31.0") +bazel_dep(name = "bazel_skylib", version = "1.4.2") + +# Only for development +bazel_dep(name = "platforms", version = "0.0.9", dev_dependency = True) +bazel_dep(name = "rules_cc", version = "0.0.9", dev_dependency = True) +bazel_dep(name = "stardoc", version = "0.6.2", dev_dependency = True) + +# Find the system rpmbuild if one is available. 
+find_rpm = use_extension("//toolchains/rpm:rpmbuild_configure.bzl", "find_system_rpmbuild_bzlmod", dev_dependency = True) +use_repo(find_rpm, "rules_pkg_rpmbuild") + +register_toolchains( + "@rules_pkg_rpmbuild//:all", + dev_dependency = True, +) diff --git a/third_party/rules_pkg-0.9.1/WORKSPACE b/third_party/rules_pkg-1.0.1/WORKSPACE similarity index 100% rename from third_party/rules_pkg-0.9.1/WORKSPACE rename to third_party/rules_pkg-1.0.1/WORKSPACE diff --git a/third_party/rules_pkg-0.9.1/deps.bzl b/third_party/rules_pkg-1.0.1/deps.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/deps.bzl rename to third_party/rules_pkg-1.0.1/deps.bzl diff --git a/third_party/rules_pkg-0.9.1/mappings.bzl b/third_party/rules_pkg-1.0.1/mappings.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/mappings.bzl rename to third_party/rules_pkg-1.0.1/mappings.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg.bzl b/third_party/rules_pkg-1.0.1/pkg.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg.bzl rename to third_party/rules_pkg-1.0.1/pkg.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg/BUILD b/third_party/rules_pkg-1.0.1/pkg/BUILD similarity index 95% rename from third_party/rules_pkg-0.9.1/pkg/BUILD rename to third_party/rules_pkg-1.0.1/pkg/BUILD index e24fc6b1..b3115f58 100644 --- a/third_party/rules_pkg-0.9.1/pkg/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/BUILD @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+load("@rules_pkg//pkg/private:make_starlark_library.bzl", "starlark_library") + # -*- coding: utf-8 -*- load("@rules_python//python:defs.bzl", "py_binary", "py_library") -load("@rules_pkg//pkg/private:make_starlark_library.bzl", "starlark_library") package(default_applicable_licenses = ["//:license"]) @@ -38,6 +39,7 @@ filegroup( srcs = glob([ "*.bzl", "*.py", + "*.tpl", ]) + [ "BUILD", "//pkg/legacy:standard_package", @@ -67,6 +69,7 @@ starlark_library( py_binary( name = "make_rpm", srcs = ["make_rpm.py"], + imports = [".."], python_version = "PY3", srcs_version = "PY3", target_compatible_with = select({ @@ -83,6 +86,7 @@ py_binary( py_library( name = "make_rpm_lib", srcs = ["make_rpm.py"], + imports = [".."], srcs_version = "PY3", visibility = [ "//experimental:__subpackages__", @@ -101,10 +105,9 @@ py_binary( visibility = ["//visibility:public"], ) -# This might be public, but use at your own risk py_library( - name = "verify_archive_test_lib", - srcs = ["verify_archive_test_lib.py"], + name = "filter_directory_lib", + srcs = ["filter_directory.py"], srcs_version = "PY3", visibility = ["//visibility:public"], ) diff --git a/third_party/rules_pkg-0.9.1/pkg/__init__.py b/third_party/rules_pkg-1.0.1/pkg/__init__.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/__init__.py rename to third_party/rules_pkg-1.0.1/pkg/__init__.py diff --git a/third_party/rules_pkg-0.9.1/pkg/deb.bzl b/third_party/rules_pkg-1.0.1/pkg/deb.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/deb.bzl rename to third_party/rules_pkg-1.0.1/pkg/deb.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg/deps.bzl b/third_party/rules_pkg-1.0.1/pkg/deps.bzl similarity index 52% rename from third_party/rules_pkg-0.9.1/pkg/deps.bzl rename to third_party/rules_pkg-1.0.1/pkg/deps.bzl index 352bff64..1fecfbfb 100644 --- a/third_party/rules_pkg-0.9.1/pkg/deps.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/deps.bzl @@ -14,40 +14,46 @@ """WORKSPACE dependencies for 
rules_pkg/pkg.""" -load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive") load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") -# @federation: BEGIN @rules_pkg +def http_archive(**kwargs): + maybe(_http_archive, **kwargs) def rules_pkg_dependencies(): - maybe( - http_archive, + http_archive( name = "bazel_skylib", + sha256 = "66ffd9315665bfaafc96b52278f57c7e2dd09f5ede279ea6d39b2be471e7e3aa", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz", - "https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz", + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz", ], - sha256 = "c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d", ) - maybe( - http_archive, + + http_archive( + name = "platforms", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/0.0.7/platforms-0.0.7.tar.gz", + "https://github.com/bazelbuild/platforms/releases/download/0.0.7/platforms-0.0.7.tar.gz", + ], + sha256 = "3a561c99e7bdbe9173aa653fd579fe849f1d8d67395780ab4770b1f381431d51", + ) + + http_archive( name = "rules_python", - url = "https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz", - sha256 = "b6d46438523a3ec0f3cead544190ee13223a52f6a6765a29eae7b7cc24cc83a0", + sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578", + strip_prefix = "rules_python-0.24.0", + url = "https://github.com/bazelbuild/rules_python/releases/download/0.24.0/rules_python-0.24.0.tar.gz", ) - maybe( - http_archive, + + http_archive( name = "rules_license", urls = [ - 
"https://mirror.bazel.build/github.com/bazelbuild/rules_license/releases/download/0.0.4/rules_license-0.0.4.tar.gz", - "https://github.com/bazelbuild/rules_license/releases/download/0.0.4/rules_license-0.0.4.tar.gz", - ], - sha256 = "6157e1e68378532d0241ecd15d3c45f6e5cfd98fc10846045509fb2a7cc9e381", + "https://mirror.bazel.build/github.com/bazelbuild/rules_license/releases/download/0.0.7/rules_license-0.0.7.tar.gz", + "https://github.com/bazelbuild/rules_license/releases/download/0.0.7/rules_license-0.0.7.tar.gz", + ], + sha256 = "4531deccb913639c30e5c7512a054d5d875698daeb75d8cf90f284375fe7c360", ) - def rules_pkg_register_toolchains(): pass - -# @federation: END @rules_pkg diff --git a/third_party/rules_pkg-0.9.1/pkg/filter_directory.py b/third_party/rules_pkg-1.0.1/pkg/filter_directory.py similarity index 99% rename from third_party/rules_pkg-0.9.1/pkg/filter_directory.py rename to third_party/rules_pkg-1.0.1/pkg/filter_directory.py index 83424241..49b20366 100644 --- a/third_party/rules_pkg-0.9.1/pkg/filter_directory.py +++ b/third_party/rules_pkg-1.0.1/pkg/filter_directory.py @@ -282,4 +282,4 @@ def value_unused(value_tuple): if __name__ == "__main__": - exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/third_party/rules_pkg-0.9.1/pkg/install.bzl b/third_party/rules_pkg-1.0.1/pkg/install.bzl similarity index 89% rename from third_party/rules_pkg-0.9.1/pkg/install.bzl rename to third_party/rules_pkg-1.0.1/pkg/install.bzl index fcf4ac82..e7b0e424 100644 --- a/third_party/rules_pkg-0.9.1/pkg/install.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/install.bzl @@ -17,24 +17,20 @@ This module provides an interface (`pkg_install`) for creating a `bazel run`-able installation script. 
""" -load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo") -load("//pkg/private:pkg_files.bzl", "process_src", "write_manifest") load("@rules_python//python:defs.bzl", "py_binary") +load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo") +load("//pkg/private:pkg_files.bzl", "create_mapping_context_from_ctx", "process_src", "write_manifest") def _pkg_install_script_impl(ctx): script_file = ctx.actions.declare_file(ctx.attr.name + ".py") - fragments = [] - files_to_run = [] - content_map = {} + mapping_context = create_mapping_context_from_ctx(ctx, label = ctx.label, default_mode = "0644") for src in ctx.attr.srcs: - process_src(content_map, - files_to_run, - src = src, - origin = src.label, - default_mode = "0644", - default_user = None, - default_group = None) + process_src( + mapping_context, + src = src, + origin = src.label, + ) manifest_file = ctx.actions.declare_file(ctx.attr.name + "-install-manifest.json") @@ -44,14 +40,14 @@ def _pkg_install_script_impl(ctx): # Note that these paths are different when used as tools run within a build. # See also # https://docs.bazel.build/versions/4.1.0/skylark/rules.html#tools-with-runfiles - write_manifest(ctx, manifest_file, content_map, use_short_path = True) + write_manifest(ctx, manifest_file, mapping_context.content_map, use_short_path = True) # Get the label of the actual py_binary used to run this script. # # This is super brittle, but I don't know how to otherwise get this # information without creating a circular dependency given the current state # of rules_python. - + # The name of the binary is the name of this target, minus # "_install_script". 
label_str = str(ctx.label)[:-len("_install_script")] @@ -72,7 +68,7 @@ def _pkg_install_script_impl(ctx): my_runfiles = ctx.runfiles( files = [manifest_file], - transitive_files = depset(transitive = files_to_run), + transitive_files = depset(transitive = mapping_context.file_deps), ) return [ @@ -87,7 +83,7 @@ _pkg_install_script = rule( doc = """Create an executable package installation script. The outputs of this rule are a single python script intended to be used as - an input to a `py_binary` target. All files necesary to run the script are + an input to a `py_binary` target. All files necessary to run the script are included as runfiles. """, implementation = _pkg_install_script_impl, @@ -142,7 +138,7 @@ def pkg_install(name, srcs, **kwargs): ``` bazel run -- //path/to:install --help ``` - + WARNING: While this rule does function when being run from within a bazel rule, such use is not recommended. If you do, **always** use the `--destdir` argument to specify the desired location for the installation to diff --git a/third_party/rules_pkg-0.9.1/pkg/legacy/BUILD b/third_party/rules_pkg-1.0.1/pkg/legacy/BUILD similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/legacy/BUILD rename to third_party/rules_pkg-1.0.1/pkg/legacy/BUILD diff --git a/third_party/rules_pkg-0.9.1/pkg/legacy/rpm.bzl b/third_party/rules_pkg-1.0.1/pkg/legacy/rpm.bzl similarity index 89% rename from third_party/rules_pkg-0.9.1/pkg/legacy/rpm.bzl rename to third_party/rules_pkg-1.0.1/pkg/legacy/rpm.bzl index 3211bbdc..74f1730e 100644 --- a/third_party/rules_pkg-0.9.1/pkg/legacy/rpm.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/legacy/rpm.bzl @@ -36,10 +36,10 @@ def _pkg_rpm_impl(ctx): tools = [] args = ["--name=" + ctx.label.name] if ctx.attr.debug: - args += ["--debug"] + args.append("--debug") if ctx.attr.rpmbuild_path: - args += ["--rpmbuild=" + ctx.attr.rpmbuild_path] + args.append("--rpmbuild=" + ctx.attr.rpmbuild_path) # buildifier: disable=print print("rpmbuild_path is 
deprecated. See the README for instructions on how" + @@ -50,7 +50,7 @@ def _pkg_rpm_impl(ctx): fail("The rpmbuild_toolchain is not properly configured: " + toolchain.name) if toolchain.path: - args += ["--rpmbuild=" + toolchain.path] + args.append("--rpmbuild=" + toolchain.path) else: executable_files = toolchain.label[DefaultInfo].files_to_run tools.append(executable_files) @@ -60,31 +60,31 @@ def _pkg_rpm_impl(ctx): if ctx.attr.version_file: if ctx.attr.version: fail("Both version and version_file attributes were specified") - args += ["--version=@" + ctx.file.version_file.path] - files += [ctx.file.version_file] + args.append("--version=@" + ctx.file.version_file.path) + files.append(ctx.file.version_file) elif ctx.attr.version: - args += ["--version=" + ctx.attr.version] + args.append("--version=" + ctx.attr.version) # Release can be specified by a file or inlined. if ctx.attr.release_file: if ctx.attr.release: fail("Both release and release_file attributes were specified") - args += ["--release=@" + ctx.file.release_file.path] - files += [ctx.file.release_file] + args.append("--release=@" + ctx.file.release_file.path) + files.append(ctx.file.release_file) elif ctx.attr.release: - args += ["--release=" + ctx.attr.release] + args.append("--release=" + ctx.attr.release) # SOURCE_DATE_EPOCH can be specified by a file or inlined. 
if ctx.attr.source_date_epoch_file: if ctx.attr.source_date_epoch: fail("Both source_date_epoch and source_date_epoch_file attributes were specified") - args += ["--source_date_epoch=@" + ctx.file.source_date_epoch_file.path] - files += [ctx.file.source_date_epoch_file] + args.append("--source_date_epoch=@" + ctx.file.source_date_epoch_file.path) + files.append(ctx.file.source_date_epoch_file) elif ctx.attr.source_date_epoch != None: - args += ["--source_date_epoch=" + str(ctx.attr.source_date_epoch)] + args.append("--source_date_epoch=" + str(ctx.attr.source_date_epoch)) if ctx.attr.architecture: - args += ["--arch=" + ctx.attr.architecture] + args.append("--arch=" + ctx.attr.architecture) if not ctx.attr.spec_file: fail("spec_file was not specified") @@ -102,19 +102,19 @@ def _pkg_rpm_impl(ctx): output = spec_file, substitutions = substitutions, ) - args += ["--spec_file=" + spec_file.path] - files += [spec_file] + args.append("--spec_file=" + spec_file.path) + files.append(spec_file) - args += ["--out_file=" + ctx.outputs.rpm.path] + args.append("--out_file=" + ctx.outputs.rpm.path) # Add data files. if ctx.file.changelog: - files += [ctx.file.changelog] - args += [ctx.file.changelog.path] + files.append(ctx.file.changelog) + args.append(ctx.file.changelog.path) files += ctx.files.data for f in ctx.files.data: - args += [f.path] + args.append(f.path) # Call the generator script. 
ctx.actions.run( diff --git a/third_party/rules_pkg-0.9.1/pkg/make_rpm.py b/third_party/rules_pkg-1.0.1/pkg/make_rpm.py similarity index 76% rename from third_party/rules_pkg-0.9.1/pkg/make_rpm.py rename to third_party/rules_pkg-1.0.1/pkg/make_rpm.py index e2ffca0a..6a9c37a7 100644 --- a/third_party/rules_pkg-0.9.1/pkg/make_rpm.py +++ b/third_party/rules_pkg-1.0.1/pkg/make_rpm.py @@ -81,10 +81,9 @@ def Cleanup(): def FindOutputFile(log): """Find the written file from the log information.""" - - m = WROTE_FILE_RE.search(log) + m = WROTE_FILE_RE.findall(log) if m: - return m.group('rpm_path') + return m return None def SlurpFile(input_path): @@ -173,6 +172,7 @@ class RpmBuilder(object): SOURCE_DIR = 'SOURCES' BUILD_DIR = 'BUILD' + BUILD_SUBDIR = 'BUILD_SUB' BUILDROOT_DIR = 'BUILDROOT' TEMP_DIR = 'TMP' RPMS_DIR = 'RPMS' @@ -187,7 +187,7 @@ def __init__(self, name, version, release, arch, rpmbuild_path, self.arch = arch self.files = [] self.rpmbuild_path = FindRpmbuild(rpmbuild_path) - self.rpm_path = None + self.rpm_paths = None self.source_date_epoch = helpers.GetFlagValue(source_date_epoch) self.debug = debug @@ -198,11 +198,13 @@ def __init__(self, name, version, release, arch, rpmbuild_path, self.description_file = None self.install_script_file = None self.file_list_path = None + self.changelog = None self.pre_scriptlet = None self.post_scriptlet = None self.preun_scriptlet = None self.postun_scriptlet = None + self.subrpms = None def AddFiles(self, paths, root=''): """Add a set of files to the current RPM. @@ -226,10 +228,13 @@ def SetupWorkdir(self, preamble_file=None, description_file=None, install_script_file=None, + subrpms_file=None, pre_scriptlet_path=None, post_scriptlet_path=None, preun_scriptlet_path=None, postun_scriptlet_path=None, + posttrans_scriptlet_path=None, + changelog_file=None, file_list_path=None): """Create the needed structure in the workdir.""" @@ -255,23 +260,34 @@ def SetupWorkdir(self, # Slurp in the scriptlets... 
self.pre_scriptlet = \ - SlurpFile(os.path.join(original_dir, pre_scriptlet_path)) if pre_scriptlet_path is not None else '' + SlurpFile(os.path.join(original_dir, pre_scriptlet_path)) if pre_scriptlet_path else '' self.post_scriptlet = \ - SlurpFile(os.path.join(original_dir, post_scriptlet_path)) if post_scriptlet_path is not None else '' + SlurpFile(os.path.join(original_dir, post_scriptlet_path)) if post_scriptlet_path else '' self.preun_scriptlet = \ - SlurpFile(os.path.join(original_dir, preun_scriptlet_path)) if preun_scriptlet_path is not None else '' + SlurpFile(os.path.join(original_dir, preun_scriptlet_path)) if preun_scriptlet_path else '' self.postun_scriptlet = \ - SlurpFile(os.path.join(original_dir, postun_scriptlet_path)) if postun_scriptlet_path is not None else '' + SlurpFile(os.path.join(original_dir, postun_scriptlet_path)) if postun_scriptlet_path else '' + self.posttrans_scriptlet = \ + SlurpFile(os.path.join(original_dir, posttrans_scriptlet_path)) if posttrans_scriptlet_path else '' + self.subrpms = \ + SlurpFile(os.path.join(original_dir, subrpms_file)) if subrpms_file else '' # Then prepare for textual substitution. This is typically only the case for the # experimental `pkg_rpm`. 
tpl_replacements = { - 'PRE_SCRIPTLET': "%pre\n" + self.pre_scriptlet, - 'POST_SCRIPTLET': "%post\n" + self.post_scriptlet, - 'PREUN_SCRIPTLET': "%preun\n" + self.preun_scriptlet, - 'POSTUN_SCRIPTLET': "%postun\n" + self.postun_scriptlet, + 'PRE_SCRIPTLET': ("%pre\n" + self.pre_scriptlet) if self.pre_scriptlet else "", + 'POST_SCRIPTLET': ("%post\n" + self.post_scriptlet) if self.post_scriptlet else "", + 'PREUN_SCRIPTLET': ("%preun\n" + self.preun_scriptlet) if self.preun_scriptlet else "", + 'POSTUN_SCRIPTLET': ("%postun\n" + self.postun_scriptlet) if self.postun_scriptlet else "", + 'POSTTRANS_SCRIPTLET': ("%posttrans\n" + self.posttrans_scriptlet) if self.posttrans_scriptlet else "", + 'SUBRPMS' : self.subrpms, + 'CHANGELOG': "" } + if changelog_file: + self.changelog = SlurpFile(os.path.join(original_dir, changelog_file)) + tpl_replacements["CHANGELOG"] = "%changelog\n" + self.changelog + # If the spec file has "Version" and "Release" tags specified in the spec # file's preamble, the values are filled in immediately afterward. These go # into "replacements". This is typically only the case for the "original" @@ -330,7 +346,7 @@ def SetupWorkdir(self, shutil.copy(os.path.join(original_dir, file_list_path), RpmBuilder.BUILD_DIR) self.file_list_path = os.path.join(RpmBuilder.BUILD_DIR, os.path.basename(file_list_path)) - def CallRpmBuild(self, dirname, rpmbuild_args): + def CallRpmBuild(self, dirname, rpmbuild_args, debuginfo_type): """Call rpmbuild with the correct arguments.""" buildroot = os.path.join(dirname, RpmBuilder.BUILDROOT_DIR) @@ -346,12 +362,31 @@ def CallRpmBuild(self, dirname, rpmbuild_args): if self.debug: args.append('-vv') + if debuginfo_type == "fedora40": + os.makedirs(f'{dirname}/{RpmBuilder.BUILD_DIR}/{RpmBuilder.BUILD_SUBDIR}') + # Common options + # NOTE: There may be a need to add '--define', 'buildsubdir .' for some + # rpmbuild versions. But that breaks other rpmbuild versions, so before + # adding it back in, add extensive tests. 
+ args += [ + '--define', '_topdir %s' % dirname, + '--define', '_tmppath %s/TMP' % dirname, + '--define', '_builddir %s/BUILD' % dirname, + ] + + if debuginfo_type in ["fedora40", "centos7", "centos9", "almalinux9.3"]: + args += ['--undefine', '_debugsource_packages'] + + if debuginfo_type in ["centos7", "centos9", "almalinux9.3"]: + args += ['--define', 'buildsubdir .'] + + if debuginfo_type == "fedora40": + args += ['--define', f'buildsubdir {RpmBuilder.BUILD_SUBDIR}'] + args += [ - '--define', '_topdir %s' % dirname, - '--define', '_tmppath %s/TMP' % dirname, - '--bb', - '--buildroot=%s' % buildroot, + '--bb', + '--buildroot=%s' % buildroot, ] # yapf: disable # Macro-based RPM parameter substitution, if necessary inputs provided. @@ -363,7 +398,11 @@ def CallRpmBuild(self, dirname, rpmbuild_args): args += ['--define', 'build_rpm_install %s' % self.install_script_file] if self.file_list_path: # %files -f is taken relative to the package root - args += ['--define', 'build_rpm_files %s' % os.path.basename(self.file_list_path)] + base_path = os.path.basename(self.file_list_path) + if debuginfo_type == "fedora40": + base_path = os.path.join("..", base_path) + + args += ['--define', 'build_rpm_files %s' % base_path] args.extend(rpmbuild_args) @@ -393,9 +432,9 @@ def CallRpmBuild(self, dirname, rpmbuild_args): if p.returncode == 0: # Find the created file. - self.rpm_path = FindOutputFile(output) + self.rpm_paths = FindOutputFile(output) - if p.returncode != 0 or not self.rpm_path: + if p.returncode != 0 or not self.rpm_paths: print('Error calling rpmbuild:') print(output) elif self.debug: @@ -404,26 +443,44 @@ def CallRpmBuild(self, dirname, rpmbuild_args): # Return the status. 
return p.returncode - def SaveResult(self, out_file): + def SaveResult(self, out_file, subrpm_out_files): """Save the result RPM out of the temporary working directory.""" - - if self.rpm_path: - shutil.copy(self.rpm_path, out_file) - if self.debug: - print('Saved RPM file to %s' % out_file) + if self.rpm_paths: + for p in self.rpm_paths: + is_subrpm = False + + for subrpm_name, subrpm_out_file in subrpm_out_files: + subrpm_prefix = self.name + '-' + subrpm_name + + if os.path.basename(p).startswith(subrpm_prefix): + shutil.copy(p, subrpm_out_file) + is_subrpm = True + if self.debug or True: + print('Saved %s sub RPM file to %s' % ( + subrpm_name, subrpm_out_file)) + break + + if not is_subrpm: + shutil.copy(p, out_file) + if self.debug or True: + print('Saved RPM file to %s' % out_file) else: print('No RPM file created.') - def Build(self, spec_file, out_file, + def Build(self, spec_file, out_file, subrpm_out_files=None, preamble_file=None, description_file=None, install_script_file=None, + subrpms_file=None, pre_scriptlet_path=None, post_scriptlet_path=None, preun_scriptlet_path=None, postun_scriptlet_path=None, + posttrans_scriptlet_path=None, file_list_path=None, - rpmbuild_args=None): + changelog_file=None, + rpmbuild_args=None, + debuginfo_type=None): """Build the RPM described by the spec_file, with other metadata in keyword arguments""" if self.debug: @@ -432,19 +489,30 @@ def Build(self, spec_file, out_file, original_dir = os.getcwd() spec_file = os.path.join(original_dir, spec_file) out_file = os.path.join(original_dir, out_file) + + if subrpm_out_files: + subrpm_out_files = (s.split(':') for s in subrpm_out_files) + subrpm_out_files = [ + (s[0], os.path.join(original_dir, s[1])) for s in subrpm_out_files] + else: + subrpm_out_files = [] + with Tempdir() as dirname: self.SetupWorkdir(spec_file, original_dir, preamble_file=preamble_file, description_file=description_file, install_script_file=install_script_file, + subrpms_file=subrpms_file, 
file_list_path=file_list_path, pre_scriptlet_path=pre_scriptlet_path, post_scriptlet_path=post_scriptlet_path, preun_scriptlet_path=preun_scriptlet_path, - postun_scriptlet_path=postun_scriptlet_path) - status = self.CallRpmBuild(dirname, rpmbuild_args or []) - self.SaveResult(out_file) + postun_scriptlet_path=postun_scriptlet_path, + posttrans_scriptlet_path=posttrans_scriptlet_path, + changelog_file=changelog_file) + status = self.CallRpmBuild(dirname, rpmbuild_args or [], debuginfo_type) + self.SaveResult(out_file, subrpm_out_files) return status @@ -467,6 +535,9 @@ def main(argv): help='The file containing the RPM specification.') parser.add_argument('--out_file', required=True, help='The destination to save the resulting RPM file to.') + parser.add_argument('--subrpm_out_file', action='append', + help='List of destinations to save resulting ' + + 'Sub RPMs to in the form of name:destination') parser.add_argument('--rpmbuild', help='Path to rpmbuild executable.') parser.add_argument('--source_date_epoch', help='Value for the SOURCE_DATE_EPOCH rpmbuild ' @@ -483,6 +554,8 @@ def main(argv): help='File containing the RPM Preamble') parser.add_argument('--description', help='File containing the RPM %description text') + parser.add_argument('--subrpms', + help='File containing the RPM subrpm details') parser.add_argument('--pre_scriptlet', help='File containing the RPM %pre scriptlet, if to be substituted') parser.add_argument('--post_scriptlet', @@ -491,9 +564,15 @@ def main(argv): help='File containing the RPM %preun scriptlet, if to be substituted') parser.add_argument('--postun_scriptlet', help='File containing the RPM %postun scriptlet, if to be substituted') + parser.add_argument('--posttrans_scriptlet', + help='File containing the RPM %posttrans scriptlet, if to be substituted') + parser.add_argument('--changelog', + help='File containing the RPM changelog text') parser.add_argument('--rpmbuild_arg', dest='rpmbuild_args', action='append', help='Any additional 
arguments to pass to rpmbuild') + parser.add_argument('--debuginfo_type', dest='debuginfo_type', default='none', + help='debuginfo type to use (centos7, fedora40, or none)') parser.add_argument('files', nargs='*') options = parser.parse_args(argv or ()) @@ -506,15 +585,20 @@ def main(argv): debug=options.debug) builder.AddFiles(options.files) return builder.Build(options.spec_file, options.out_file, + options.subrpm_out_file, preamble_file=options.preamble, description_file=options.description, install_script_file=options.install_script, + subrpms_file=options.subrpms, file_list_path=options.file_list, pre_scriptlet_path=options.pre_scriptlet, post_scriptlet_path=options.post_scriptlet, preun_scriptlet_path=options.preun_scriptlet, postun_scriptlet_path=options.postun_scriptlet, - rpmbuild_args=options.rpmbuild_args) + posttrans_scriptlet_path=options.posttrans_scriptlet, + changelog_file=options.changelog, + rpmbuild_args=options.rpmbuild_args, + debuginfo_type=options.debuginfo_type) except NoRpmbuildFoundError: print('ERROR: rpmbuild is required but is not present in PATH') return 1 diff --git a/third_party/rules_pkg-0.9.1/pkg/mappings.bzl b/third_party/rules_pkg-1.0.1/pkg/mappings.bzl similarity index 88% rename from third_party/rules_pkg-0.9.1/pkg/mappings.bzl rename to third_party/rules_pkg-1.0.1/pkg/mappings.bzl index e49acd07..d5a96a0b 100644 --- a/third_party/rules_pkg-0.9.1/pkg/mappings.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/mappings.bzl @@ -27,8 +27,8 @@ Rules that actually make use of the outputs of the above rules are not specified here. """ -load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo") load("@bazel_skylib//lib:paths.bzl", "paths") +load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo") # TODO(#333): strip_prefix module functions should produce unique outputs. In # particular, this one and `_sp_from_pkg` can overlap. 
@@ -72,15 +72,21 @@ strip_prefix = struct( from_root = _sp_from_root, ) -def pkg_attributes(mode = None, user = None, group = None, **kwargs): +def pkg_attributes( + mode = None, + user = None, + group = None, + uid = None, + gid = None, + **kwargs): """Format attributes for use in package mapping rules. If "mode" is not provided, it will default to the mapping rule's default mode. These vary per mapping rule; consult the respective documentation for more details. - Not providing any of "user", or "group" will result in the package builder - choosing one for you. The chosen value should not be relied upon. + Not providing any of "user", "group", "uid", or "gid" will result in the package + builder choosing one for you. The chosen value should not be relied upon. Well-known attributes outside of the above are documented in the rules_pkg reference. @@ -90,8 +96,10 @@ def pkg_attributes(mode = None, user = None, group = None, **kwargs): Args: mode: string: UNIXy octal permissions, as a string. - user: string: Filesystem owning user. - group: string: Filesystem owning group. + user: string: Filesystem owning user name. + group: string: Filesystem owning group name. + uid: int: Filesystem owning user id. + gid: int: Filesystem owning group id. **kwargs: any other desired attributes. 
Returns: @@ -105,6 +113,23 @@ def pkg_attributes(mode = None, user = None, group = None, **kwargs): ret["user"] = user if group: ret["group"] = group + if uid != None: + if type(uid) != type(0): + fail('Got "' + str(uid) + '" instead of integer uid') + ret["uid"] = uid + if gid != None: + if type(gid) != type(0): + fail('Got "' + str(gid) + '" instead of integer gid') + ret["gid"] = gid + + if user != None and user.isdigit() and uid == None: + # buildifier: disable=print + print("Warning: found numeric username and no uid, did you mean to specify the uid instead?") + + if group != None and group.isdigit() and gid == None: + # buildifier: disable=print + print("Warning: found numeric group and no gid, did you mean to specify the gid instead?") + return json.encode(ret) #### @@ -122,8 +147,8 @@ def _do_strip_prefix(path, to_strip, src_file): if path_norm.startswith(to_strip_norm): return path_norm[len(to_strip_norm):] - elif src_file.is_directory and (path_norm + '/') == to_strip_norm: - return '' + elif src_file.is_directory and (path_norm + "/") == to_strip_norm: + return "" else: # Avoid user surprise by failing if prefix stripping doesn't work as # expected. @@ -196,7 +221,13 @@ def _pkg_files_impl(ctx): # The input sources are already known. Let's calculate the destinations... # Exclude excludes - srcs = [f for f in ctx.files.srcs if f not in ctx.files.excludes] + srcs = [] # srcs is source File objects, not Targets + file_to_target = {} + for src in ctx.attr.srcs: + for f in src[DefaultInfo].files.to_list(): + if f not in ctx.files.excludes: + srcs.append(f) + file_to_target[f] = src if ctx.attr.strip_prefix == _PKGFILEGROUP_STRIP_ALL: src_dest_paths_map = {src: paths.join(ctx.attr.prefix, src.basename) for src in srcs} @@ -260,6 +291,26 @@ def _pkg_files_impl(ctx): else: src_dest_paths_map[src_file] = paths.join(ctx.attr.prefix, rename_dest) + # At this point, we have a fully valid src -> dest mapping for all the + # explicitly named targets in srcs. 
Now we can fill in their runfiles. + if ctx.attr.include_runfiles: + for src in srcs: + target = file_to_target[src] + runfiles = target[DefaultInfo].default_runfiles + if runfiles: + base_path = src_dest_paths_map[src] + ".runfiles/" + ctx.workspace_name + for rf in runfiles.files.to_list(): + dest_path = paths.join(base_path, rf.short_path) + + # print("Add runfile:", rf.path, 'as', dest_path) + have_it = src_dest_paths_map.get(rf) + if have_it: + if have_it != dest_path: + # buildifier: disable=print + print("same source mapped to different locations", rf, have_it, dest_path) + else: + src_dest_paths_map[rf] = dest_path + # At this point, we have a fully valid src -> dest mapping in src_dest_paths_map. # # Construct the inverse of this mapping to pass to the output providers, and @@ -403,6 +454,14 @@ pkg_files = rule( default = {}, allow_files = True, ), + "include_runfiles": attr.bool( + doc = """Add runfiles for all srcs. + + The runfiles are in the paths that Bazel uses. For example, for the + target `//my_prog:foo`, we would see files under paths like + `foo.runfiles//my_prog/` + """, + ), }, provides = [PackageFilesInfo], ) @@ -537,30 +596,32 @@ pkg_mklink_impl = rule( provides = [PackageSymlinkInfo], ) -def pkg_mklink(name, link_name, target, attributes=None, src=None, **kwargs): - """Create a symlink. - - Args: - name: target name - target: target path that the link should point to. - link_name: the path in the package that should point to the target. - attributes: file attributes. - """ - if src: - if target: - fail("You can not specify both target and src.") - # buildifier: disable=print - print("Warning: pkg_mklink.src is deprecated. Use target.") - target = src - pkg_mklink_impl( - name = name, - target = target, - link_name = link_name, - attributes = attributes, - **kwargs, - ) +#buildifier: disable=function-docstring-args +def pkg_mklink(name, link_name, target, attributes = None, src = None, **kwargs): + """Create a symlink. 
+ Wraps [pkg_mklink_impl](#pkg_mklink_impl) + Args: + name: target name + target: target path that the link should point to. + link_name: the path in the package that should point to the target. + attributes: file attributes. + """ + if src: + if target: + fail("You can not specify both target and src.") + + # buildifier: disable=print + print("Warning: pkg_mklink.src is deprecated. Use target.") + target = src + pkg_mklink_impl( + name = name, + target = target, + link_name = link_name, + attributes = attributes, + **kwargs + ) def _pkg_filegroup_impl(ctx): files = [] diff --git a/third_party/rules_pkg-0.9.1/pkg/package_variables.bzl b/third_party/rules_pkg-1.0.1/pkg/package_variables.bzl similarity index 86% rename from third_party/rules_pkg-0.9.1/pkg/package_variables.bzl rename to third_party/rules_pkg-1.0.1/pkg/package_variables.bzl index 865fefdd..28f62c92 100644 --- a/third_party/rules_pkg-0.9.1/pkg/package_variables.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/package_variables.bzl @@ -16,6 +16,6 @@ def add_ctx_variables(ctx, values): """Add selected variables from ctx.""" - values['target_cpu'] = ctx.var.get("TARGET_CPU") - values['compilation_mode'] = ctx.var.get("COMPILATION_MODE") + values["target_cpu"] = ctx.var.get("TARGET_CPU") + values["compilation_mode"] = ctx.var.get("COMPILATION_MODE") return values diff --git a/third_party/rules_pkg-0.9.1/pkg/path.bzl b/third_party/rules_pkg-1.0.1/pkg/path.bzl similarity index 83% rename from third_party/rules_pkg-0.9.1/pkg/path.bzl rename to third_party/rules_pkg-1.0.1/pkg/path.bzl index c933ccd5..f384398d 100644 --- a/third_party/rules_pkg-0.9.1/pkg/path.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/path.bzl @@ -11,8 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-"""Helper functions that don't depend on Skylark, so can be unit tested.""" +"""Helper functions that don't depend on Starlark, so can be unit tested.""" +# buildifier: disable=function-docstring-args +# buildifier: disable=function-docstring-return def safe_short_path(file_): """Like `File.short_path` but safe for use with files from external repositories. """ @@ -27,9 +29,10 @@ def safe_short_path(file_): # Beginning with `file_.path`, remove optional `F.root.path`. working_path = file_.path if not file_.is_source: - working_path = working_path[len(file_.root.path)+1:] + working_path = working_path[len(file_.root.path) + 1:] return working_path +# buildifier: disable=function-docstring-args,function-docstring-return def _short_path_dirname(path): """Returns the directory's name of the short path of an artifact.""" sp = safe_short_path(path) @@ -39,6 +42,8 @@ def _short_path_dirname(path): return "" return sp[:last_pkg] +# buildifier: disable=function-docstring-args +# buildifier: disable=function-docstring-return def dest_path(f, strip_prefix, data_path_without_prefix = ""): """Returns the short path of f, stripped of strip_prefix.""" f_short_path = safe_short_path(f) @@ -56,32 +61,33 @@ def dest_path(f, strip_prefix, data_path_without_prefix = ""): # Avoid stripping prefix if final directory is incomplete if prefix_last_dir not in f_short_path.split("/"): - strip_prefix = data_path_without_prefix + strip_prefix = data_path_without_prefix return f_short_path[len(strip_prefix):] return f_short_path -def compute_data_path(ctx, data_path): +def compute_data_path(label, data_path): """Compute the relative data path prefix from the data_path attribute. Args: - ctx: rule implementation ctx. - data_path: path to a file, relative to the package of the rule ctx. + label: target label + data_path: path to a file, relative to the package of the label. + Returns: + str """ - build_dir = ctx.label.package if data_path: # Strip ./ from the beginning if specified. 
# There is no way to handle .// correctly (no function that would make - # that possible and Skylark is not turing complete) so just consider it + # that possible and Starlark is not turing complete) so just consider it # as an absolute path. if len(data_path) >= 2 and data_path[0:2] == "./": data_path = data_path[2:] if not data_path or data_path == ".": # Relative to current package - return build_dir + return label.package elif data_path[0] == "/": # Absolute path return data_path[1:] else: # Relative to a sub-directory - tmp_short_path_dirname = build_dir + tmp_short_path_dirname = label.package if tmp_short_path_dirname: return tmp_short_path_dirname + "/" + data_path return data_path diff --git a/third_party/rules_pkg-0.9.1/pkg/pkg.bzl b/third_party/rules_pkg-1.0.1/pkg/pkg.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/pkg.bzl rename to third_party/rules_pkg-1.0.1/pkg/pkg.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg/private/BUILD b/third_party/rules_pkg-1.0.1/pkg/private/BUILD similarity index 96% rename from third_party/rules_pkg-0.9.1/pkg/private/BUILD rename to third_party/rules_pkg-1.0.1/pkg/private/BUILD index 85096bcb..8f3ca4c6 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/private/BUILD @@ -65,6 +65,7 @@ py_library( srcs = [ "build_info.py", ], + imports = ["../.."], srcs_version = "PY3", visibility = [ "//:__subpackages__", @@ -78,6 +79,7 @@ py_library( "__init__.py", "archive.py", ], + imports = ["../.."], srcs_version = "PY3", visibility = [ "//:__subpackages__", @@ -91,6 +93,7 @@ py_library( "__init__.py", "helpers.py", ], + imports = ["../.."], srcs_version = "PY3", visibility = [ "//:__subpackages__", @@ -101,6 +104,7 @@ py_library( py_library( name = "manifest", srcs = ["manifest.py"], + imports = ["../.."], srcs_version = "PY3", visibility = ["//visibility:public"], ) diff --git a/third_party/rules_pkg-0.9.1/pkg/private/__init__.py 
b/third_party/rules_pkg-1.0.1/pkg/private/__init__.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/private/__init__.py rename to third_party/rules_pkg-1.0.1/pkg/private/__init__.py diff --git a/third_party/rules_pkg-0.9.1/pkg/private/archive.py b/third_party/rules_pkg-1.0.1/pkg/private/archive.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/private/archive.py rename to third_party/rules_pkg-1.0.1/pkg/private/archive.py diff --git a/third_party/rules_pkg-0.9.1/pkg/private/build_info.py b/third_party/rules_pkg-1.0.1/pkg/private/build_info.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/private/build_info.py rename to third_party/rules_pkg-1.0.1/pkg/private/build_info.py diff --git a/third_party/rules_pkg-0.9.1/pkg/private/deb/BUILD b/third_party/rules_pkg-1.0.1/pkg/private/deb/BUILD similarity index 96% rename from third_party/rules_pkg-0.9.1/pkg/private/deb/BUILD rename to third_party/rules_pkg-1.0.1/pkg/private/deb/BUILD index c38da64b..646877b1 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/deb/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/private/deb/BUILD @@ -48,6 +48,7 @@ exports_files( py_binary( name = "make_deb", srcs = ["make_deb.py"], + imports = ["../../.."], python_version = "PY3", visibility = ["//visibility:public"], deps = [ @@ -58,6 +59,7 @@ py_binary( py_library( name = "make_deb_lib", srcs = ["make_deb.py"], + imports = ["../../.."], srcs_version = "PY3", visibility = ["//tests/deb:__pkg__"], deps = [ diff --git a/third_party/rules_pkg-0.9.1/pkg/private/deb/deb.bzl b/third_party/rules_pkg-1.0.1/pkg/private/deb/deb.bzl similarity index 77% rename from third_party/rules_pkg-0.9.1/pkg/private/deb/deb.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/deb/deb.bzl index 2541d86b..53bc2ebb 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/deb/deb.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/private/deb/deb.bzl @@ -13,10 +13,10 @@ # limitations under the License. 
"""Rule for creating Debian packages.""" -load("//pkg:providers.bzl", "PackageArtifactInfo", "PackageVariablesInfo") +load("//pkg:providers.bzl", "PackageVariablesInfo") load("//pkg/private:util.bzl", "setup_output_files") -_tar_filetype = [".tar", ".tar.gz", ".tgz", ".tar.bz2", "tar.xz"] +_tar_filetype = [".tar", ".tar.gz", ".tgz", ".tar.bz2", "tar.xz", "tar.zst"] def _pkg_deb_impl(ctx): """The implementation for the pkg_deb rule.""" @@ -50,39 +50,39 @@ def _pkg_deb_impl(ctx): if ctx.attr.architecture_file: if ctx.attr.architecture != "all": fail("Both architecture and architecture_file attributes were specified") - args += ["--architecture=@" + ctx.file.architecture_file.path] - files += [ctx.file.architecture_file] + args.append("--architecture=@" + ctx.file.architecture_file.path) + files.append(ctx.file.architecture_file) else: - args += ["--architecture=" + ctx.attr.architecture] + args.append("--architecture=" + ctx.attr.architecture) if ctx.attr.preinst: - args += ["--preinst=@" + ctx.file.preinst.path] - files += [ctx.file.preinst] + args.append("--preinst=@" + ctx.file.preinst.path) + files.append(ctx.file.preinst) if ctx.attr.postinst: - args += ["--postinst=@" + ctx.file.postinst.path] - files += [ctx.file.postinst] + args.append("--postinst=@" + ctx.file.postinst.path) + files.append(ctx.file.postinst) if ctx.attr.prerm: - args += ["--prerm=@" + ctx.file.prerm.path] - files += [ctx.file.prerm] + args.append("--prerm=@" + ctx.file.prerm.path) + files.append(ctx.file.prerm) if ctx.attr.postrm: - args += ["--postrm=@" + ctx.file.postrm.path] - files += [ctx.file.postrm] + args.append("--postrm=@" + ctx.file.postrm.path) + files.append(ctx.file.postrm) if ctx.attr.config: - args += ["--config=@" + ctx.file.config.path] - files += [ctx.file.config] + args.append("--config=@" + ctx.file.config.path) + files.append(ctx.file.config) if ctx.attr.templates: - args += ["--templates=@" + ctx.file.templates.path] - files += [ctx.file.templates] + 
args.append("--templates=@" + ctx.file.templates.path) + files.append(ctx.file.templates) if ctx.attr.triggers: - args += ["--triggers=@" + ctx.file.triggers.path] - files += [ctx.file.triggers] + args.append("--triggers=@" + ctx.file.triggers.path) + files.append(ctx.file.triggers) # Conffiles can be specified by a file or a string list if ctx.attr.conffiles_file: if ctx.attr.conffiles: fail("Both conffiles and conffiles_file attributes were specified") - args += ["--conffile=@" + ctx.file.conffiles_file.path] - files += [ctx.file.conffiles_file] + args.append("--conffile=@" + ctx.file.conffiles_file.path) + files.append(ctx.file.conffiles_file) elif ctx.attr.conffiles: args += ["--conffile=%s" % cf for cf in ctx.attr.conffiles] @@ -90,51 +90,55 @@ def _pkg_deb_impl(ctx): if ctx.attr.version_file: if ctx.attr.version: fail("Both version and version_file attributes were specified") - args += ["--version=@" + ctx.file.version_file.path] - files += [ctx.file.version_file] + args.append("--version=@" + ctx.file.version_file.path) + files.append(ctx.file.version_file) elif ctx.attr.version: - args += ["--version=" + ctx.attr.version] + args.append("--version=" + ctx.attr.version) else: fail("Neither version_file nor version attribute was specified") if ctx.attr.description_file: if ctx.attr.description: fail("Both description and description_file attributes were specified") - args += ["--description=@" + ctx.file.description_file.path] - files += [ctx.file.description_file] + args.append("--description=@" + ctx.file.description_file.path) + files.append(ctx.file.description_file) elif ctx.attr.description: - args += ["--description=" + ctx.attr.description] + args.append("--description=" + ctx.attr.description) else: fail("Neither description_file nor description attribute was specified") + if ctx.attr.changelog: + args.append("--changelog=@" + ctx.file.changelog.path) + files.append(ctx.file.changelog) + # Built using can also be specified by a file or inlined (but is 
not mandatory) if ctx.attr.built_using_file: if ctx.attr.built_using: fail("Both build_using and built_using_file attributes were specified") - args += ["--built_using=@" + ctx.file.built_using_file.path] - files += [ctx.file.built_using_file] + args.append("--built_using=@" + ctx.file.built_using_file.path) + files.append(ctx.file.built_using_file) elif ctx.attr.built_using: - args += ["--built_using=" + ctx.attr.built_using] + args.append("--built_using=" + ctx.attr.built_using) if ctx.attr.depends_file: if ctx.attr.depends: fail("Both depends and depends_file attributes were specified") - args += ["--depends=@" + ctx.file.depends_file.path] - files += [ctx.file.depends_file] + args.append("--depends=@" + ctx.file.depends_file.path) + files.append(ctx.file.depends_file) elif ctx.attr.depends: args += ["--depends=" + d for d in ctx.attr.depends] if ctx.attr.priority: - args += ["--priority=" + ctx.attr.priority] + args.append("--priority=" + ctx.attr.priority) if ctx.attr.section: - args += ["--section=" + ctx.attr.section] + args.append("--section=" + ctx.attr.section) if ctx.attr.homepage: - args += ["--homepage=" + ctx.attr.homepage] + args.append("--homepage=" + ctx.attr.homepage) if ctx.attr.license: - args += ["--license=" + ctx.attr.license] + args.append("--license=" + ctx.attr.license) - args += ["--distribution=" + ctx.attr.distribution] - args += ["--urgency=" + ctx.attr.urgency] + args.append("--distribution=" + ctx.attr.distribution) + args.append("--urgency=" + ctx.attr.urgency) args += ["--suggests=" + d for d in ctx.attr.suggests] args += ["--enhances=" + d for d in ctx.attr.enhances] args += ["--conflicts=" + d for d in ctx.attr.conflicts] @@ -168,16 +172,23 @@ def _pkg_deb_impl(ctx): files = depset([output_file]), runfiles = ctx.runfiles(files = outputs), ), - PackageArtifactInfo( - label = ctx.label.name, - file = output_file, - file_name = output_name, - ), ] # A rule for creating a deb file, see README.md pkg_deb_impl = rule( implementation = 
_pkg_deb_impl, + doc = """ + Create a Debian package. + + This rule produces 2 artifacts: a .deb and a .changes file. The DefaultInfo will + include both. If you need downstream rule to specifically depend on only the .deb or + .changes file then you can use `filegroup` to select distinct output groups. + + **OutputGroupInfo** + - `out` the Debian package or a symlink to the actual package. + - `deb` the package with any precise file name created with `package_file_name`. + - `changes` the .changes file. + """, attrs = { # @unsorted-dict-items "data": attr.label( @@ -215,12 +226,17 @@ pkg_deb_impl = rule( See https://www.debian.org/doc/debian-policy/ch-binary.html#prompting-in-maintainer-scripts.""", allow_single_file = True, ), + "changelog": attr.label( + doc = """The package changelog. + See https://www.debian.org/doc/debian-policy/ch-source.html#s-dpkgchangelog.""", + allow_single_file = True, + ), "description": attr.string( doc = """The package description. Must not be used with `description_file`.""", ), "description_file": attr.label( doc = """The package description. 
Must not be used with `description`.""", - allow_single_file = True + allow_single_file = True, ), "distribution": attr.string( doc = """"distribution: See http://www.debian.org/doc/debian-policy.""", @@ -261,11 +277,11 @@ pkg_deb_impl = rule( allow_single_file = True, ), "built_using": attr.string( - doc="""The tool that were used to build this package provided either inline (with built_using) or from a file (with built_using_file).""" + doc = """The tool that were used to build this package provided either inline (with built_using) or from a file (with built_using_file).""", ), "built_using_file": attr.label( - doc="""The tool that were used to build this package provided either inline (with built_using) or from a file (with built_using_file).""", - allow_single_file = True + doc = """The tool that were used to build this package provided either inline (with built_using) or from a file (with built_using_file).""", + allow_single_file = True, ), "conffiles": attr.string_list( doc = """The list of conffiles or a file containing one conffile per line. Each item is an absolute path on the target system where the deb is installed. @@ -287,7 +303,6 @@ See https://www.debian.org/doc/debian-policy/ch-files.html#s-config-files.""", ), "homepage": attr.string(doc = """The homepage of the project."""), "license": attr.string(doc = """The license of the project."""), - "breaks": attr.string_list( doc = """See http://www.debian.org/doc/debian-policy/ch-relationships.html#s-binarydeps.""", default = [], @@ -332,15 +347,15 @@ See https://www.debian.org/doc/debian-policy/ch-files.html#s-config-files.""", # Common attributes "out": attr.output( - doc = """See Common Attributes""", - mandatory = True + doc = """See [Common Attributes](#out)""", + mandatory = True, ), "package_file_name": attr.string( - doc = """See Common Attributes. + doc = """See [Common Attributes](#package_file_name). 
Default: "{package}-{version}-{architecture}.deb""", ), "package_variables": attr.label( - doc = """See Common Attributes""", + doc = """See [Common Attributes](#package_variables)""", providers = [PackageVariablesInfo], ), @@ -352,13 +367,12 @@ See https://www.debian.org/doc/debian-policy/ch-files.html#s-config-files.""", allow_files = True, ), }, - provides = [PackageArtifactInfo], ) def pkg_deb(name, out = None, **kwargs): """@wraps(pkg_deb_impl).""" if not out: - out = name + ".deb" + out = name + ".deb" pkg_deb_impl( name = name, out = out, diff --git a/third_party/rules_pkg-0.9.1/pkg/private/deb/make_deb.py b/third_party/rules_pkg-1.0.1/pkg/private/deb/make_deb.py similarity index 90% rename from third_party/rules_pkg-0.9.1/pkg/private/deb/make_deb.py rename to third_party/rules_pkg-1.0.1/pkg/private/deb/make_deb.py index 9e2e5c5e..9d10c4b9 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/deb/make_deb.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/deb/make_deb.py @@ -14,6 +14,7 @@ """A simple cross-platform helper to create a debian package.""" import argparse +from enum import Enum import gzip import hashlib import io @@ -30,6 +31,8 @@ from pkg.private import helpers +Multiline = Enum('Multiline', ['NO', 'YES', 'YES_ADD_NEWLINE']) + # list of debian fields : (name, mandatory, is_multiline[, default]) # see http://www.debian.org/doc/debian-policy/ch-controlfields.html @@ -118,7 +121,7 @@ def AddArFileEntry(fileobj, filename, fileobj.write(b'\n') # 2-byte alignment padding -def MakeDebianControlField(name: str, value: str, is_multiline:bool=False) -> str: +def MakeDebianControlField(name: str, value: str, multiline:Multiline=Multiline.NO) -> str: """Add a field to a debian control file. 
https://www.debian.org/doc/debian-policy/ch-controlfields.html#syntax-of-control-files @@ -132,15 +135,20 @@ def MakeDebianControlField(name: str, value: str, is_multiline:bool=False) -> st if isinstance(value, list): value = u', '.join(value) value = value.rstrip() - if not is_multiline: + if multiline == Multiline.NO: value = value.strip() if '\n' in value: raise ValueError( '\\n is not allowed in simple control fields (%s)' % value) lines = value.split('\n') - result = name + ': ' +lines[0].strip() + '\n' - for line in lines[1:]: + i = 0 + if multiline != Multiline.YES_ADD_NEWLINE: + result = name + ': ' + lines[i].strip() + '\n' + i = 1 + else: + result = name + ':\n' + for line in lines[i:]: if not line.startswith(' '): result += ' ' result += line @@ -155,10 +163,10 @@ def CreateDebControl(extrafiles=None, **kwargs): for values in DEBIAN_FIELDS: fieldname = values[0] mandatory = values[1] - is_multiline = values[2] + multiline = Multiline.YES if values[2] else Multiline.NO key = fieldname[0].lower() + fieldname[1:].replace('-', '') if mandatory or (key in kwargs and kwargs[key]): - controlfile += MakeDebianControlField(fieldname, kwargs[key], is_multiline) + controlfile += MakeDebianControlField(fieldname, kwargs[key], multiline) # Create the control.tar file tar = io.BytesIO() with gzip.GzipFile('control.tar.gz', mode='w', fileobj=tar, mtime=0) as gz: @@ -190,6 +198,7 @@ def CreateDeb(output, templates=None, triggers=None, conffiles=None, + changelog=None, **kwargs): """Create a full debian package.""" extrafiles = OrderedDict() @@ -209,6 +218,8 @@ def CreateDeb(output, extrafiles['triggers'] = (triggers, 0o644) if conffiles: extrafiles['conffiles'] = ('\n'.join(conffiles) + '\n', 0o644) + if changelog: + extrafiles['changelog'] = (changelog, 0o644) control = CreateDebControl(extrafiles=extrafiles, **kwargs) # Write the final AR archive (the deb package) @@ -226,7 +237,7 @@ def CreateDeb(output, ext = 'tar.bz2' else: ext = '.'.join(ext) - if ext not in 
['tar.bz2', 'tar.gz', 'tar.xz', 'tar.lzma']: + if ext not in ['tar.bz2', 'tar.gz', 'tar.xz', 'tar.lzma', 'tar.zst']: ext = 'tar' data_size = os.stat(data).st_size with open(data, 'rb') as datafile: @@ -280,7 +291,7 @@ def CreateChanges(output, changesdata = u''.join([ MakeDebianControlField('Format', '1.8'), - MakeDebianControlField('Date', time.ctime(timestamp)), + MakeDebianControlField('Date', time.asctime(time.gmtime(timestamp))), MakeDebianControlField('Source', package), MakeDebianControlField('Binary', package), MakeDebianControlField('Architecture', architecture), @@ -290,21 +301,27 @@ def CreateChanges(output, MakeDebianControlField('Maintainer', maintainer), MakeDebianControlField('Changed-By', maintainer), # The description in the changes file is strange - 'Description:\n %s - %s\n' % (package, description.split('\n')[0]), + MakeDebianControlField('Description', ( + '%s - %s\n') % ( + package, description.split('\n')[0]), + multiline=Multiline.YES_ADD_NEWLINE), MakeDebianControlField('Changes', ( - '\n %s (%s) %s; urgency=%s' + '%s (%s) %s; urgency=%s' '\n Changes are tracked in revision control.') % ( package, version, distribution, urgency), - is_multiline=True), + multiline=Multiline.YES_ADD_NEWLINE), MakeDebianControlField( - 'Files', '\n ' + ' '.join( - [checksums['md5'], debsize, section, priority, deb_basename])), + 'Files', ' '.join( + [checksums['md5'], debsize, section, priority, deb_basename]), + multiline=Multiline.YES_ADD_NEWLINE), MakeDebianControlField( 'Checksums-Sha1', - '\n ' + ' '.join([checksums['sha1'], debsize, deb_basename])), + ' '.join([checksums['sha1'], debsize, deb_basename]), + multiline=Multiline.YES_ADD_NEWLINE), MakeDebianControlField( 'Checksums-Sha256', - '\n ' + ' '.join([checksums['sha256'], debsize, deb_basename])) + ' '.join([checksums['sha256'], debsize, deb_basename]), + multiline=Multiline.YES_ADD_NEWLINE) ]) with open(output, 'wb') as changes_fh: changes_fh.write(changesdata.encode('utf-8')) @@ -353,6 +370,9 @@ 
def main(): parser.add_argument( '--conffile', action='append', help='List of conffiles (prefix item with @ to provide a path)') + parser.add_argument( + '--changelog', + help='The changelog file (prefix item with @ to provide a path).') AddControlFlags(parser) options = parser.parse_args() @@ -367,6 +387,7 @@ def main(): templates=helpers.GetFlagValue(options.templates, False), triggers=helpers.GetFlagValue(options.triggers, False), conffiles=GetFlagValues(options.conffile), + changelog=helpers.GetFlagValue(options.changelog, False), package=options.package, version=helpers.GetFlagValue(options.version), description=helpers.GetFlagValue(options.description), diff --git a/third_party/rules_pkg-0.9.1/pkg/private/helpers.py b/third_party/rules_pkg-1.0.1/pkg/private/helpers.py similarity index 98% rename from third_party/rules_pkg-0.9.1/pkg/private/helpers.py rename to third_party/rules_pkg-1.0.1/pkg/private/helpers.py index 0ad8e934..5147cc2f 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/helpers.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/helpers.py @@ -52,7 +52,7 @@ def GetFlagValue(flagvalue, strip=True): """Converts a raw flag string to a useable value. 1. Expand @filename style flags to the content of filename. - 2. Cope with Python3 strangness of sys.argv. + 2. Cope with Python3 strangeness of sys.argv. sys.argv is not actually proper str types on Unix with Python3 The bytes of the arg are each directly transcribed to the characters of the str. It is actually more complex than that, as described in the docs. 
diff --git a/third_party/rules_pkg-0.9.1/pkg/private/install.py.tpl b/third_party/rules_pkg-1.0.1/pkg/private/install.py.tpl similarity index 98% rename from third_party/rules_pkg-0.9.1/pkg/private/install.py.tpl rename to third_party/rules_pkg-1.0.1/pkg/private/install.py.tpl index 612908a2..1e82fae5 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/install.py.tpl +++ b/third_party/rules_pkg-1.0.1/pkg/private/install.py.tpl @@ -186,7 +186,7 @@ def main(args): installer = NativeInstaller(destdir=args.destdir) if not CALLED_FROM_BAZEL_RUN and RUNFILE_PREFIX is None: - logging.critical("RUNFILES_DIR must be set in your enviornment when this is run as a bazel build tool.") + logging.critical("RUNFILES_DIR must be set in your environment when this is run as a bazel build tool.") logging.critical("This is most likely an issue on Windows. See https://github.com/bazelbuild/rules_pkg/issues/387.") return 1 @@ -200,4 +200,4 @@ def main(args): if __name__ == "__main__": - exit(main(sys.argv)) + sys.exit(main(sys.argv)) diff --git a/third_party/rules_pkg-0.9.1/pkg/private/make_starlark_library.bzl b/third_party/rules_pkg-1.0.1/pkg/private/make_starlark_library.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/private/make_starlark_library.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/make_starlark_library.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg/private/manifest.py b/third_party/rules_pkg-1.0.1/pkg/private/manifest.py similarity index 93% rename from third_party/rules_pkg-0.9.1/pkg/private/manifest.py rename to third_party/rules_pkg-1.0.1/pkg/private/manifest.py index 57b7ddda..6bfd3871 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/manifest.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/manifest.py @@ -33,15 +33,19 @@ class ManifestEntry(object): mode: str user: str group: str + uid: int + gid: int origin: str = None - def __init__(self, type, dest, src, mode, user, group, origin = None): + def __init__(self, type, dest, src, 
mode, user, group, uid = None, gid = None, origin = None): self.type = type self.dest = dest self.src = src self.mode = mode self.user = user self.group = group + self.uid = uid + self.gid = gid self.origin = origin def __repr__(self): diff --git a/third_party/rules_pkg-0.9.1/pkg/private/pkg_files.bzl b/third_party/rules_pkg-1.0.1/pkg/private/pkg_files.bzl similarity index 51% rename from third_party/rules_pkg-0.9.1/pkg/private/pkg_files.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/pkg_files.bzl index f0fe60b3..4ab0eb3f 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/pkg_files.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/private/pkg_files.bzl @@ -34,7 +34,6 @@ Concepts and terms: load("//pkg:path.bzl", "compute_data_path", "dest_path") load( "//pkg:providers.bzl", - "PackageArtifactInfo", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", @@ -44,9 +43,10 @@ load( ENTRY_IS_FILE = "file" # Entry is a file: take content from ENTRY_IS_LINK = "symlink" # Entry is a symlink: dest -> ENTRY_IS_DIR = "dir" # Entry is an empty dir -ENTRY_IS_TREE = "tree" # Entry is a tree artifact: take tree from +ENTRY_IS_TREE = "tree" # Entry is a tree artifact: take tree from ENTRY_IS_EMPTY_FILE = "empty-file" # Entry is a an empty file +# buildifier: disable=name-conventions _DestFile = provider( doc = """Information about each destination in the final package.""", fields = { @@ -57,10 +57,81 @@ _DestFile = provider( "link_to": "path to link to. src must not be set", "entry_type": "string. 
See ENTRY_IS_* values above.", "origin": "target which added this", + "uid": "uid, or empty", + "gid": "gid, or empty", }, ) -def _check_dest(content_map, dest, src, origin): +# buildifier: disable=name-conventions +_MappingContext = provider( + doc = """Fields passed to process_* methods.""", + fields = { + "content_map": "in/out The content_map we are building up", + "file_deps": "in/out list of file Depsets represented in the map", + "label": "ctx.label", + + # Behaviors + "allow_duplicates_with_different_content": "bool: don't fail when you double mapped files", + "include_runfiles": "bool: include runfiles", + "workspace_name": "string: name of the main workspace", + "strip_prefix": "strip_prefix", + + "path_mapper": "function to map destination paths", + + # Defaults + "default_mode": "Default mode to apply to file without a mode setting", + "default_user": "Default user name to apply to file without a user", + "default_group": "Default group name to apply to file without a group", + "default_uid": "Default numeric uid to apply to file without a uid", + "default_gid": "Default numeric gid to apply to file without a gid", + }, +) + +# buildifier: disable=function-docstring-args +def create_mapping_context_from_ctx( + ctx, + label, + allow_duplicates_with_different_content = None, + strip_prefix = None, + include_runfiles = None, + default_mode = None, + path_mapper = None + ): + """Construct a MappingContext. + + Args: See the provider definition. 
+ + Returns: + _MappingContext + """ + if allow_duplicates_with_different_content == None: + allow_duplicates_with_different_content = ctx.attr.allow_duplicates_with_different_content if hasattr(ctx.attr, "allow_duplicates_with_different_content") else False + if strip_prefix == None: + strip_prefix = ctx.attr.strip_prefix if hasattr(ctx.attr, "strip_prefix") else "" + if include_runfiles == None: + include_runfiles = ctx.attr.include_runfiles if hasattr(ctx.attr, "include_runfiles") else False + if default_mode == None: + default_mode = ctx.attr.mode if hasattr(ctx.attr, "default_mode") else "" + + return _MappingContext( + content_map = dict(), + file_deps = list(), + label = label, + allow_duplicates_with_different_content = allow_duplicates_with_different_content, + strip_prefix = strip_prefix, + include_runfiles = include_runfiles, + workspace_name = ctx.workspace_name, + default_mode = default_mode, + path_mapper = path_mapper or (lambda x: x), + # TODO(aiuto): allow these to be passed in as needed. But, before doing + # that, explore defauilt_uid/gid as 0 rather than None + default_user = "", + default_group = "", + default_uid = None, + default_gid = None, + ) + +def _check_dest(content_map, dest, src, origin, allow_duplicates_with_different_content = False): old_entry = content_map.get(dest) if not old_entry: return @@ -72,96 +143,116 @@ def _check_dest(content_map, dest, src, origin): # people specify the owner in one place, but another overly broad glob # brings in the file with a different owner. 
if old_entry.src.path != src.path: - # buildifier: disable=print - print( - "Duplicate output path: <%s>, declared in %s and %s" % ( - dest, - origin, - content_map[dest].origin, - ), - "\n SRC:", + msg = "Duplicate output path: <%s>, declared in %s and %s\n SRC: %s" % ( + dest, + origin, + content_map[dest].origin, src, ) + if allow_duplicates_with_different_content: + # buildifier: disable=print + print("WARNING:", msg) + else: + # When we default to this behaviour, we should consider telling + # users the attribute to set to deal with this. + # For now though, let's not, since they've explicitly opted in. + fail(msg) -def _merge_attributes(info, mode, user, group): +def _merge_attributes(info, mode, user, group, uid, gid): if hasattr(info, "attributes"): attrs = info.attributes mode = attrs.get("mode") or mode user = attrs.get("user") or user group = attrs.get("group") or group - return (mode, user, group) -def _process_pkg_dirs(content_map, pkg_dirs_info, origin, default_mode, default_user, default_group): - attrs = _merge_attributes(pkg_dirs_info, default_mode, default_user, default_group) + new_uid = attrs.get("uid") + if new_uid != None: + uid = new_uid + new_gid = attrs.get("gid") + if new_gid != None: + gid = new_gid + return (mode, user, group, uid, gid) + +def _merge_context_attributes(info, mapping_context): + """Merge defaults from mapping context with those in the source provider. + + Args: + info: provider from a pkt_* target + mapping_context: MappingContext with the defaults. 
+ """ + default_mode = mapping_context.default_mode if hasattr(mapping_context, "default_mode") else "" + default_user = mapping_context.default_user if hasattr(mapping_context, "default_user") else "" + default_group = mapping_context.default_group if hasattr(mapping_context, "default_group") else "" + default_uid = mapping_context.default_uid if hasattr(mapping_context, "default_uid") else "" + default_gid = mapping_context.default_gid if hasattr(mapping_context, "default_gid") else "" + return _merge_attributes(info, default_mode, default_user, default_group, default_uid, default_gid) + +def _process_pkg_dirs(mapping_context, pkg_dirs_info, origin): + attrs = _merge_context_attributes(pkg_dirs_info, mapping_context) for dir in pkg_dirs_info.dirs: dest = dir.strip("/") - _check_dest(content_map, dest, None, origin) - content_map[dest] = _DestFile( + _check_dest(mapping_context.content_map, dest, None, origin, mapping_context.allow_duplicates_with_different_content) + mapping_context.content_map[dest] = _DestFile( src = None, entry_type = ENTRY_IS_DIR, mode = attrs[0], user = attrs[1], group = attrs[2], + uid = attrs[3], + gid = attrs[4], origin = origin, ) -def _process_pkg_files(content_map, pkg_files_info, origin, default_mode, default_user, default_group): - attrs = _merge_attributes(pkg_files_info, default_mode, default_user, default_group) +def _process_pkg_files(mapping_context, pkg_files_info, origin): + attrs = _merge_context_attributes(pkg_files_info, mapping_context) for filename, src in pkg_files_info.dest_src_map.items(): dest = filename.strip("/") - _check_dest(content_map, dest, src, origin) - content_map[dest] = _DestFile( + _check_dest(mapping_context.content_map, dest, src, origin, mapping_context.allow_duplicates_with_different_content) + mapping_context.content_map[dest] = _DestFile( src = src, entry_type = ENTRY_IS_TREE if src.is_directory else ENTRY_IS_FILE, mode = attrs[0], user = attrs[1], group = attrs[2], + uid = attrs[3], + gid = 
attrs[4], origin = origin, ) -def _process_pkg_symlink(content_map, pkg_symlink_info, origin, default_mode, default_user, default_group): +def _process_pkg_symlink(mapping_context, pkg_symlink_info, origin): dest = pkg_symlink_info.destination.strip("/") - attrs = _merge_attributes(pkg_symlink_info, default_mode, default_user, default_group) - _check_dest(content_map, dest, None, origin) - content_map[dest] = _DestFile( + attrs = _merge_context_attributes(pkg_symlink_info, mapping_context) + _check_dest(mapping_context.content_map, dest, None, origin, mapping_context.allow_duplicates_with_different_content) + mapping_context.content_map[dest] = _DestFile( src = None, entry_type = ENTRY_IS_LINK, mode = attrs[0], user = attrs[1], group = attrs[2], + uid = attrs[3], + gid = attrs[4], origin = origin, link_to = pkg_symlink_info.target, ) -def _process_pkg_filegroup(content_map, pkg_filegroup_info, origin, default_mode, default_user, default_group): +def _process_pkg_filegroup(mapping_context, pkg_filegroup_info): if hasattr(pkg_filegroup_info, "pkg_dirs"): for d in pkg_filegroup_info.pkg_dirs: - _process_pkg_dirs(content_map, d[0], d[1], default_mode, default_user, default_group) + _process_pkg_dirs(mapping_context, d[0], d[1]) if hasattr(pkg_filegroup_info, "pkg_files"): for pf in pkg_filegroup_info.pkg_files: - _process_pkg_files(content_map, pf[0], pf[1], default_mode, default_user, default_group) + _process_pkg_files(mapping_context, pf[0], pf[1]) if hasattr(pkg_filegroup_info, "pkg_symlinks"): for psl in pkg_filegroup_info.pkg_symlinks: - _process_pkg_symlink(content_map, psl[0], psl[1], default_mode, default_user, default_group) + _process_pkg_symlink(mapping_context, psl[0], psl[1]) -def process_src( - content_map, - files, - src, - origin, - default_mode, - default_user, - default_group): +def process_src(mapping_context, src, origin): """Add an entry to the content map. 
Args: - content_map: in/out The content map - files: in/out list of file Depsets represented in the map + mapping_context: (r/w) a MappingContext src: Source Package*Info object origin: The rule instance adding this entry - default_mode: fallback mode to use for Package*Info elements without mode - default_user: fallback user to use for Package*Info elements without user - default_group: fallback mode to use for Package*Info elements without group Returns: True if src was a Package*Info and added to content_map. @@ -170,168 +261,135 @@ def process_src( # Gather the files for every srcs entry here, even if it is not from # a pkg_* rule. if DefaultInfo in src: - files.append(src[DefaultInfo].files) + mapping_context.file_deps.append(src[DefaultInfo].files) found_info = False if PackageFilesInfo in src: _process_pkg_files( - content_map, + mapping_context, src[PackageFilesInfo], origin, - default_mode = default_mode, - default_user = default_user, - default_group = default_group, ) found_info = True if PackageFilegroupInfo in src: _process_pkg_filegroup( - content_map, + mapping_context, src[PackageFilegroupInfo], - origin, - default_mode = default_mode, - default_user = default_user, - default_group = default_group, ) found_info = True if PackageSymlinkInfo in src: _process_pkg_symlink( - content_map, + mapping_context, src[PackageSymlinkInfo], origin, - default_mode = default_mode, - default_user = default_user, - default_group = default_group, ) found_info = True if PackageDirsInfo in src: _process_pkg_dirs( - content_map, + mapping_context, src[PackageDirsInfo], origin, - default_mode = "0555", - default_user = default_user, - default_group = default_group, ) found_info = True return found_info -def add_directory(content_map, dir_path, origin, mode = None, user = None, group = None): +def add_directory(mapping_context, dir_path, origin, mode = None, user = None, group = None, uid = None, gid = None): """Add an empty directory to the content map. 
Args: - content_map: The content map + mapping_context: (r/w) a MappingContext dir_path: Where to place the file in the package. origin: The rule instance adding this entry mode: fallback mode to use for Package*Info elements without mode user: fallback user to use for Package*Info elements without user group: fallback mode to use for Package*Info elements without group + uid: numeric uid + gid: numeric gid """ - content_map[dir_path.strip("/")] = _DestFile( + mapping_context.content_map[dir_path.strip("/")] = _DestFile( src = None, entry_type = ENTRY_IS_DIR, origin = origin, mode = mode, - user = user, - group = group, + user = user or mapping_context.default_user, + group = group or mapping_context.default_group, + uid = uid or mapping_context.default_uid, + gid = gid or mapping_context.default_gid, ) -def add_empty_file(content_map, dest_path, origin, mode = None, user = None, group = None): +def add_empty_file(mapping_context, dest_path, origin, mode = None, user = None, group = None, uid = None, gid = None): """Add a single file to the content map. Args: - content_map: The content map + mapping_context: (r/w) a MappingContext dest_path: Where to place the file in the package. 
origin: The rule instance adding this entry mode: fallback mode to use for Package*Info elements without mode user: fallback user to use for Package*Info elements without user group: fallback mode to use for Package*Info elements without group + uid: numeric uid + gid: numeric gid """ dest = dest_path.strip("/") - _check_dest(content_map, dest, None, origin) - content_map[dest] = _DestFile( + _check_dest(mapping_context.content_map, dest, None, origin) + mapping_context.content_map[dest] = _DestFile( src = None, entry_type = ENTRY_IS_EMPTY_FILE, origin = origin, mode = mode, - user = user, - group = group, + user = user or mapping_context.default_user, + group = group or mapping_context.default_group, + uid = uid or mapping_context.default_uid, + gid = gid or mapping_context.default_gid, ) -def add_label_list( - ctx, - content_map, - file_deps, - srcs, - default_mode = None, - default_user = None, - default_group = None): +def add_label_list(mapping_context, srcs): """Helper method to add a list of labels (typically 'srcs') to a content_map. Args: - ctx: rule context. - content_map: (r/w) The content map to update. - file_deps: (r/w) The list of file Depsets that srcs depend on. - srcs: List of source objects. 
- default_mode: fallback mode to use for Package*Info elements without mode - default_user: fallback user to use for Package*Info elements without user - default_group: fallback mode to use for Package*Info elements without group + mapping_context: (r/w) a MappingContext + srcs: List of source objects """ - if hasattr(ctx.attr, "include_runfiles"): - include_runfiles = ctx.attr.include_runfiles - else: - include_runfiles = False - # Compute the relative path data_path = compute_data_path( - ctx, - ctx.attr.strip_prefix if hasattr(ctx.attr, "strip_prefix") else "", + mapping_context.label, + mapping_context.strip_prefix, + ) + data_path_without_prefix = compute_data_path( + mapping_context.label, + ".", ) - data_path_without_prefix = compute_data_path(ctx, ".") for src in srcs: if not process_src( - content_map, - file_deps, + mapping_context, src = src, origin = src.label, - default_mode = default_mode, - default_user = default_user, - default_group = default_group, ): # Add in the files of srcs which are not pkg_* types add_from_default_info( - content_map, - file_deps, + mapping_context, src, data_path, data_path_without_prefix, - default_mode = default_mode, - default_user = default_user, - default_group = default_group, - include_runfiles = include_runfiles, + mapping_context.include_runfiles, + mapping_context.workspace_name, ) def add_from_default_info( - content_map, - file_deps, + mapping_context, src, data_path, data_path_without_prefix, - default_mode = None, - default_user = None, - default_group = None, - include_runfiles = False): + include_runfiles, + workspace_name): """Helper method to add the DefaultInfo of a target to a content_map. Args: - content_map: (r/w) The content map to update. - file_deps: (r/w) The list of file Depsets that srcs depend on. + mapping_context: (r/w) a MappingContext src: A source object. 
data_path: path to package data_path_without_prefix: path to the package after prefix stripping - default_mode: fallback mode to use for Package*Info elements without mode - default_user: fallback user to use for Package*Info elements without user - default_group: fallback mode to use for Package*Info elements without group include_runfiles: Include runfiles """ if not DefaultInfo in src: @@ -341,43 +399,54 @@ def add_from_default_info( the_executable = get_my_executable(src) all_files = src[DefaultInfo].files.to_list() for f in all_files: - d_path = dest_path(f, data_path, data_path_without_prefix) + d_path = mapping_context.path_mapper( + dest_path(f, data_path, data_path_without_prefix)) if f.is_directory: add_tree_artifact( - content_map, - d_path, - f, + mapping_context.content_map, + dest_path = d_path, + src = f, origin = src.label, - mode = default_mode, - user = default_user, - group = default_group, + mode = mapping_context.default_mode, + user = mapping_context.default_user, + group = mapping_context.default_group, ) else: - fmode = "0755" if f == the_executable else default_mode + fmode = "0755" if f == the_executable else mapping_context.default_mode add_single_file( - content_map, + mapping_context, dest_path = d_path, src = f, origin = src.label, mode = fmode, - user = default_user, - group = default_group, + user = mapping_context.default_user, + group = mapping_context.default_group, ) if include_runfiles: runfiles = src[DefaultInfo].default_runfiles if runfiles: - base_path = d_path + ".runfiles" + mapping_context.file_deps.append(runfiles.files) + + # Computing the runfiles root is subtle. It should be based off of + # the executable, but that is not always obvious. When in doubt, + # the first file of DefaultInfo.files should be the right target. 
+ base_file = the_executable or all_files[0] + base_file_path = dest_path(base_file, data_path, data_path_without_prefix) + base_path = base_file_path + ".runfiles/" + workspace_name + for rf in runfiles.files.to_list(): - d_path = base_path + "/" + rf.short_path - fmode = "0755" if rf == the_executable else default_mode - _check_dest(content_map, d_path, rf, src.label) - content_map[d_path] = _DestFile( + d_path = mapping_context.path_mapper(base_path + "/" + rf.short_path) + fmode = "0755" if rf == the_executable else mapping_context.default_mode + _check_dest(mapping_context.content_map, d_path, rf, src.label, mapping_context.allow_duplicates_with_different_content) + mapping_context.content_map[d_path] = _DestFile( src = rf, - entry_type = ENTRY_IS_FILE, + entry_type = ENTRY_IS_TREE if rf.is_directory else ENTRY_IS_FILE, origin = src.label, mode = fmode, - user = default_user, - group = default_group, + user = mapping_context.default_user, + group = mapping_context.default_group, + uid = mapping_context.default_uid, + gid = mapping_context.default_gid, ) def get_my_executable(src): @@ -409,54 +478,61 @@ def get_my_executable(src): return ftr.executable return None -def add_single_file(content_map, dest_path, src, origin, mode = None, user = None, group = None): +def add_single_file(mapping_context, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None): """Add an single file to the content map. Args: - content_map: The content map + mapping_context: the MappingContext dest_path: Where to place the file in the package. src: Source object. 
Must have len(src[DefaultInfo].files) == 1 origin: The rule instance adding this entry mode: fallback mode to use for Package*Info elements without mode user: fallback user to use for Package*Info elements without user group: fallback mode to use for Package*Info elements without group + uid: numeric uid + gid: numeric gid """ dest = dest_path.strip("/") - _check_dest(content_map, dest, src, origin) - content_map[dest] = _DestFile( + _check_dest(mapping_context.content_map, dest, src, origin, mapping_context.allow_duplicates_with_different_content) + mapping_context.content_map[dest] = _DestFile( src = src, entry_type = ENTRY_IS_FILE, origin = origin, mode = mode, - user = user, - group = group, + user = user or mapping_context.default_user, + group = group or mapping_context.default_group, + uid = uid or mapping_context.default_uid, + gid = gid or mapping_context.default_gid, ) -def add_symlink(content_map, dest_path, src, origin, mode = None, user = None, group = None): +def add_symlink(mapping_context, dest_path, src, origin): """Add a symlink to the content map. + TODO(aiuto): This is a vestage left from the pkg_tar use. We could + converge code by having pkg_tar be a macro that expands symlinks to + pkg_symlink targets and srcs them in. + Args: - content_map: The content map + mapping_context: the MappingContext dest_path: Where to place the file in the package. src: Path to link to. 
origin: The rule instance adding this entry - mode: fallback mode to use for Package*Info elements without mode - user: fallback user to use for Package*Info elements without user - group: fallback mode to use for Package*Info elements without group """ dest = dest_path.strip("/") - _check_dest(content_map, dest, None, origin) - content_map[dest] = _DestFile( + _check_dest(mapping_context.content_map, dest, None, origin) + mapping_context.content_map[dest] = _DestFile( src = None, link_to = src, entry_type = ENTRY_IS_LINK, origin = origin, - mode = mode, - user = user, - group = group, + mode = mapping_context.default_mode, + user = mapping_context.default_user, + group = mapping_context.default_group, + uid = mapping_context.default_uid, + gid = mapping_context.default_gid, ) -def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = None, group = None): +def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None): """Add an tree artifact (directory output) to the content map. Args: @@ -465,8 +541,10 @@ def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = N src: Source object. 
Must have len(src[DefaultInfo].files) == 1 origin: The rule instance adding this entry mode: fallback mode to use for Package*Info elements without mode - user: fallback user to use for Package*Info elements without user - group: fallback mode to use for Package*Info elements without group + user: User name for the entry (probably unused) + group: group name for the entry (probably unused) + uid: User id for the entry (probably unused) + gid: Group id for the entry (probably unused) """ content_map[dest_path] = _DestFile( src = src, @@ -475,9 +553,11 @@ def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = N mode = mode, user = user, group = group, + uid = uid, + gid = gid, ) -def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty_print=False): +def write_manifest(ctx, manifest_file, content_map, use_short_path = False, pretty_print = False): """Write a content map to a manifest file. The format of this file is currently undocumented, as it is a private @@ -491,6 +571,7 @@ def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty manifest_file: File object used as the output destination content_map: content_map (see concepts at top of file) use_short_path: write out the manifest file destinations in terms of "short" paths, suitable for `bazel run`. + pretty_print: indent the output nicely. Takes more space so it is off by default. 
""" ctx.actions.write( manifest_file, @@ -498,11 +579,11 @@ def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty [ _encode_manifest_entry(dst, content_map[dst], use_short_path, pretty_print) for dst in sorted(content_map.keys()) - ] - ) + "\n]\n" + ], + ) + "\n]\n", ) -def _encode_manifest_entry(dest, df, use_short_path, pretty_print=False): +def _encode_manifest_entry(dest, df, use_short_path, pretty_print = False): entry_type = df.entry_type if hasattr(df, "entry_type") else ENTRY_IS_FILE if df.src: src = df.src.short_path if use_short_path else df.src.path @@ -522,16 +603,18 @@ def _encode_manifest_entry(dest, df, use_short_path, pretty_print=False): # Since this causes all sorts of chaos with our tests, be consistent across # all Bazel versions. origin_str = str(df.origin) - if not origin_str.startswith('@'): - origin_str = '@' + origin_str + if not origin_str.startswith("@"): + origin_str = "@" + origin_str data = { - "type": df.entry_type, + "type": entry_type, "src": src, "dest": dest.strip("/"), "mode": df.mode or "", "user": df.user or None, "group": df.group or None, + "uid": df.uid, + "gid": df.gid, "origin": origin_str, } diff --git a/third_party/rules_pkg-0.9.1/pkg/private/tar/BUILD b/third_party/rules_pkg-1.0.1/pkg/private/tar/BUILD similarity index 93% rename from third_party/rules_pkg-0.9.1/pkg/private/tar/BUILD rename to third_party/rules_pkg-1.0.1/pkg/private/tar/BUILD index fb9ab50a..68a2b437 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/tar/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/private/tar/BUILD @@ -18,7 +18,9 @@ All interfaces are subject to change at any time. 
load("@rules_python//python:defs.bzl", "py_binary", "py_library") -licenses(["notice"]) +package( + default_applicable_licenses = ["//:license"], +) filegroup( name = "standard_package", @@ -45,6 +47,7 @@ exports_files( py_binary( name = "build_tar", srcs = ["build_tar.py"], + imports = ["../../.."], python_version = "PY3", srcs_version = "PY3", visibility = ["//visibility:public"], @@ -62,6 +65,7 @@ py_library( srcs = [ "tar_writer.py", ], + imports = ["../../.."], srcs_version = "PY3", visibility = [ "//tests:__subpackages__", diff --git a/third_party/rules_pkg-0.9.1/pkg/private/tar/build_tar.py b/third_party/rules_pkg-1.0.1/pkg/private/tar/build_tar.py similarity index 89% rename from third_party/rules_pkg-0.9.1/pkg/private/tar/build_tar.py rename to third_party/rules_pkg-1.0.1/pkg/private/tar/build_tar.py index cea2167b..ce80a9fe 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/tar/build_tar.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/tar/build_tar.py @@ -26,7 +26,7 @@ def normpath(path): - """Normalize a path to the format we need it. + r"""Normalize a path to the format we need it. os.path.normpath changes / to \ on windows, but tarfile needs / style paths. 
@@ -42,7 +42,7 @@ class TarFile(object): class DebError(Exception): pass - def __init__(self, output, directory, compression, compressor, default_mtime): + def __init__(self, output, directory, compression, compressor, create_parents, allow_dups_from_deps, default_mtime): # Directory prefix on all output paths d = directory.strip('/') self.directory = (d + '/') if d else None @@ -50,12 +50,16 @@ def __init__(self, output, directory, compression, compressor, default_mtime): self.compression = compression self.compressor = compressor self.default_mtime = default_mtime + self.create_parents = create_parents + self.allow_dups_from_deps = allow_dups_from_deps def __enter__(self): self.tarfile = tar_writer.TarFileWriter( self.output, self.compression, self.compressor, + self.create_parents, + self.allow_dups_from_deps, default_mtime=self.default_mtime) return self @@ -70,7 +74,14 @@ def normalize_path(self, path: str) -> str: # No path should ever come in with slashs on either end, but protect # against that anyway. dest = dest.strip('/') - if self.directory: + # This prevents a potential problem for users with both a prefix_dir and + # symlinks that also repeat the prefix_dir. The old behavior was that we + # would get just the symlink path. Now we are prefixing with the prefix, + # so you get the file in the wrong place. + # We silently de-dup that. If people come up with a real use case for + # the /a/b/a/b/rest... output we can start an issue and come up with a + # solution at that time. + if self.directory and not dest.startswith(self.directory): dest = self.directory + dest return dest @@ -175,7 +186,10 @@ def add_link(self, symlink, destination, mode=None, ids=None, names=None): names: (username, groupname) for the file to set ownership. An empty file will be created as `destfile` in the layer. 
""" - dest = self.normalize_path(symlink) + if not symlink.startswith("./"): + dest = self.normalize_path(symlink) + else: + dest = symlink self.tarfile.add_file( dest, tarfile.SYMTYPE, @@ -293,7 +307,7 @@ def add_tree(self, tree_top, destpath, mode=None, ids=None, names=None): gname=names[1]) def add_manifest_entry(self, entry, file_attributes): - # Use the pkg_tar mode/owner remaping as a fallback + # Use the pkg_tar mode/owner remapping as a fallback non_abs_path = entry.dest.strip('/') if file_attributes: attrs = file_attributes(non_abs_path) @@ -308,14 +322,19 @@ def add_manifest_entry(self, entry, file_attributes): else: # Use group that legacy tar process would assign attrs['names'] = (entry.user, attrs.get('names')[1]) + if entry.uid is not None: + if entry.gid is not None: + attrs['ids'] = (entry.uid, entry.gid) + else: + attrs['ids'] = (entry.uid, entry.uid) if entry.type == manifest.ENTRY_IS_LINK: self.add_link(entry.dest, entry.src, **attrs) elif entry.type == manifest.ENTRY_IS_DIR: - self.add_empty_dir(entry.dest, **attrs) + self.add_empty_dir(self.normalize_path(entry.dest), **attrs) elif entry.type == manifest.ENTRY_IS_TREE: self.add_tree(entry.src, entry.dest, **attrs) elif entry.type == manifest.ENTRY_IS_EMPTY_FILE: - self.add_empty_file(entry.dest, **attrs) + self.add_empty_file(self.normalize_path(entry.dest), **attrs) else: self.add_file(entry.src, entry.dest, **attrs) @@ -371,6 +390,13 @@ def main(): 'path/to/file=root.root.') parser.add_argument('--stamp_from', default='', help='File to find BUILD_STAMP in') + parser.add_argument('--create_parents', + action='store_true', + help='Automatically creates parent directories implied by a' + ' prefix if they do not exist') + parser.add_argument('--allow_dups_from_deps', + action='store_true', + help='') options = parser.parse_args() # Parse modes arguments @@ -420,7 +446,9 @@ def main(): directory = helpers.GetFlagValue(options.directory), compression = options.compression, compressor = 
options.compressor, - default_mtime=default_mtime) as output: + default_mtime=default_mtime, + create_parents=options.create_parents, + allow_dups_from_deps=options.allow_dups_from_deps) as output: def file_attributes(filename): if filename.startswith('/'): diff --git a/third_party/rules_pkg-0.9.1/pkg/private/tar/tar.bzl b/third_party/rules_pkg-1.0.1/pkg/private/tar/tar.bzl similarity index 70% rename from third_party/rules_pkg-0.9.1/pkg/private/tar/tar.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/tar/tar.bzl index dc901a57..120820a8 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/tar/tar.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/private/tar/tar.bzl @@ -13,8 +13,7 @@ # limitations under the License. """Rules for making .tar files.""" -load("//pkg:path.bzl", "compute_data_path", "dest_path") -load("//pkg:providers.bzl", "PackageArtifactInfo", "PackageVariablesInfo") +load("//pkg:providers.bzl", "PackageVariablesInfo") load( "//pkg/private:pkg_files.bzl", "add_directory", @@ -22,8 +21,7 @@ load( "add_label_list", "add_single_file", "add_symlink", - "add_tree_artifact", - "process_src", + "create_mapping_context_from_ctx", "write_manifest", ) load("//pkg/private:util.bzl", "setup_output_files", "substitute_package_variables") @@ -58,15 +56,7 @@ def _pkg_tar_impl(ctx): # Files needed by rule implementation at runtime files = [] - - outputs, output_file, output_name = setup_output_files(ctx) - - # Compute the relative path - data_path = compute_data_path(ctx, ctx.attr.strip_prefix) - data_path_without_prefix = compute_data_path(ctx, ".") - - # Find a list of path remappings to apply. - remap_paths = ctx.attr.remap_paths + outputs, output_file, _ = setup_output_files(ctx) # Start building the arguments. 
args = ctx.actions.args() @@ -112,46 +102,36 @@ def _pkg_tar_impl(ctx): args.add("--mtime", "%d" % ctx.attr.mtime) if ctx.attr.portable_mtime: args.add("--mtime", "portable") + if ctx.attr.modes: + for key in ctx.attr.modes: + args.add("--modes", "%s=%s" % (_quote(key), ctx.attr.modes[key])) + if ctx.attr.owners: + for key in ctx.attr.owners: + args.add("--owners", "%s=%s" % (_quote(key), ctx.attr.owners[key])) + if ctx.attr.ownernames: + for key in ctx.attr.ownernames: + args.add( + "--owner_names", + "%s=%s" % (_quote(key), ctx.attr.ownernames[key]), + ) # Now we begin processing the files. - file_deps = [] # inputs we depend on - content_map = {} # content handled in the manifest - - # Start with all the pkg_* inputs - for src in ctx.attr.srcs: - if not process_src( - content_map, - file_deps, - src = src, - origin = src.label, - default_mode = None, - default_user = None, - default_group = None, - ): - src_files = src[DefaultInfo].files.to_list() - if ctx.attr.include_runfiles: - runfiles = src[DefaultInfo].default_runfiles - if runfiles: - file_deps.append(runfiles.files) - src_files.extend(runfiles.files.to_list()) + path_mapper = None + if ctx.attr.remap_paths: + path_mapper = lambda path: _remap(ctx.attr.remap_paths, path) - # Add in the files of srcs which are not pkg_* types - for f in src_files: - d_path = dest_path(f, data_path, data_path_without_prefix) - if f.is_directory: - add_tree_artifact(content_map, d_path, f, src.label) - else: - # Note: This extra remap is the bottleneck preventing this - # large block from being a utility method as shown below. - # Should we disallow mixing pkg_files in srcs with remap? - # I am fine with that if it makes the code more readable. 
- dest = _remap(remap_paths, d_path) - add_single_file(content_map, dest, f, src.label) + mapping_context = create_mapping_context_from_ctx( + ctx, + label = ctx.label, + include_runfiles = ctx.attr.include_runfiles, + strip_prefix = ctx.attr.strip_prefix, + # build_tar does the default modes. Consider moving attribute mapping + # into mapping_context. + default_mode = None, + path_mapper = path_mapper, + ) - # TODO(aiuto): I want the code to look like this, but we don't have lambdas. - # transform_path = lambda f: _remap( - # remap_paths, dest_path(f, data_path, data_path_without_prefix)) - # add_label_list(ctx, content_map, file_deps, ctx.attr.srcs, transform_path) + add_label_list(mapping_context, srcs = ctx.attr.srcs) # The files attribute is a map of labels to destinations. We can add them # directly to the content map. @@ -159,35 +139,23 @@ def _pkg_tar_impl(ctx): target_files = target.files.to_list() if len(target_files) != 1: fail("Each input must describe exactly one file.", attr = "files") - file_deps.append(depset([target_files[0]])) + mapping_context.file_deps.append(depset([target_files[0]])) add_single_file( - content_map, + mapping_context, f_dest_path, target_files[0], target.label, ) - if ctx.attr.modes: - for key in ctx.attr.modes: - args.add("--modes", "%s=%s" % (_quote(key), ctx.attr.modes[key])) - if ctx.attr.owners: - for key in ctx.attr.owners: - args.add("--owners", "%s=%s" % (_quote(key), ctx.attr.owners[key])) - if ctx.attr.ownernames: - for key in ctx.attr.ownernames: - args.add( - "--owner_names", - "%s=%s" % (_quote(key), ctx.attr.ownernames[key]), - ) for empty_file in ctx.attr.empty_files: - add_empty_file(content_map, empty_file, ctx.label) + add_empty_file(mapping_context, empty_file, ctx.label) for empty_dir in ctx.attr.empty_dirs or []: - add_directory(content_map, empty_dir, ctx.label) + add_directory(mapping_context, empty_dir, ctx.label) for f in ctx.files.deps: args.add("--tar", f.path) for link in ctx.attr.symlinks: 
add_symlink( - content_map, + mapping_context, link, ctx.attr.symlinks[link], ctx.label, @@ -199,20 +167,29 @@ def _pkg_tar_impl(ctx): manifest_file = ctx.actions.declare_file(ctx.label.name + ".manifest") files.append(manifest_file) - write_manifest(ctx, manifest_file, content_map) + write_manifest(ctx, manifest_file, mapping_context.content_map) args.add("--manifest", manifest_file.path) args.set_param_file_format("flag_per_line") args.use_param_file("@%s", use_always = False) - inputs = depset(direct = ctx.files.deps + files, transitive = file_deps) + if ctx.attr.create_parents: + args.add("--create_parents") + + if ctx.attr.allow_duplicates_from_deps: + args.add("--allow_dups_from_deps") + + inputs = depset( + direct = ctx.files.deps + files, + transitive = mapping_context.file_deps, + ) ctx.actions.run( mnemonic = "PackageTar", progress_message = "Writing: %s" % output_file.path, inputs = inputs, tools = [ctx.executable.compressor] if ctx.executable.compressor else [], - executable = ctx.executable.build_tar, + executable = ctx.executable._build_tar, arguments = [args], outputs = [output_file], env = { @@ -233,12 +210,7 @@ def _pkg_tar_impl(ctx): # or this OutputGroup might be totally removed. # Depend on it at your own risk! OutputGroupInfo( - manifest = [manifest_file], - ), - PackageArtifactInfo( - label = ctx.label.name, - file = output_file, - file_name = output_name, + manifest = [manifest_file], ), ] @@ -246,38 +218,76 @@ def _pkg_tar_impl(ctx): pkg_tar_impl = rule( implementation = _pkg_tar_impl, attrs = { - "strip_prefix": attr.string(), + "strip_prefix": attr.string( + doc = """(note: Use strip_prefix = "." to strip path to the package but preserve relative paths of sub directories beneath the package.)""", + ), "package_dir": attr.string( - doc = """Prefix to be prepend to all paths written.""" + doc = """Prefix to be prepend to all paths written. + + This is applied as a final step, while writing to the archive. + Any other attributes (e.g. 
symlinks) which specify a path, must do so relative to package_dir. + The value may contain variables. See [package_file_name](#package_file_name) for examples. + """, ), "package_dir_file": attr.label(allow_single_file = True), - "deps": attr.label_list(allow_files = tar_filetype), - "srcs": attr.label_list(allow_files = True), - "files": attr.label_keyed_string_dict(allow_files = True), + "deps": attr.label_list( + doc = """tar files which will be unpacked and repacked into the archive.""", + allow_files = tar_filetype, + ), + "srcs": attr.label_list( + doc = """Inputs which will become part of the tar archive.""", + allow_files = True, + ), + "files": attr.label_keyed_string_dict( + doc = """Obsolete. Do not use.""", + allow_files = True, + ), "mode": attr.string(default = "0555"), "modes": attr.string_dict(), "mtime": attr.int(default = _DEFAULT_MTIME), "portable_mtime": attr.bool(default = True), - "owner": attr.string(default = "0.0"), + "owner": attr.string( + doc = """Default numeric owner.group to apply to files when not set via pkg_attributes.""", + default = "0.0", + ), "ownername": attr.string(default = "."), "owners": attr.string_dict(), "ownernames": attr.string_dict(), "extension": attr.string(default = "tar"), "symlinks": attr.string_dict(), "empty_files": attr.string_list(), - "include_runfiles": attr.bool(), + "include_runfiles": attr.bool( + doc = ("""Include runfiles for executables. 
These appear as they would in bazel-bin.""" + + """For example: 'path/to/myprog.runfiles/path/to/my_data.txt'."""), + ), "empty_dirs": attr.string_list(), "remap_paths": attr.string_dict(), - "compressor": attr.label(executable = True, cfg = "exec"), - "compressor_args": attr.string(), + "compressor": attr.label( + doc = """External tool which can compress the archive.""", + executable = True, + cfg = "exec", + ), + "compressor_args": attr.string( + doc = """Arg list for `compressor`.""", + ), + "create_parents": attr.bool(default = True), + "allow_duplicates_from_deps": attr.bool(default = False), # Common attributes "out": attr.output(mandatory = True), - "package_file_name": attr.string(doc = "See Common Attributes"), + "package_file_name": attr.string(doc = "See [Common Attributes](#package_file_name)"), "package_variables": attr.label( - doc = "See Common Attributes", + doc = "See [Common Attributes](#package_variables)", providers = [PackageVariablesInfo], ), + "allow_duplicates_with_different_content": attr.bool( + default = True, + doc = """If true, will allow you to reference multiple pkg_* which conflict +(writing different content or metadata to the same destination). +Such behaviour is always incorrect, but we provide a flag to support it in case old +builds were accidentally doing it. Never explicitly set this to true for new code. +""", + ), "stamp": attr.int( doc = """Enable file time stamping. Possible values:
  • stamp = 1: Use the time of the build as the modification time of each file in the archive. @@ -292,21 +302,22 @@ pkg_tar_impl = rule( "private_stamp_detect": attr.bool(default = False), # Implicit dependencies. - "build_tar": attr.label( + "_build_tar": attr.label( default = Label("//pkg/private/tar:build_tar"), cfg = "exec", executable = True, allow_files = True, ), }, - provides = [PackageArtifactInfo], ) +# buildifier: disable=function-docstring-args def pkg_tar(name, **kwargs): """Creates a .tar file. See pkg_tar_impl. @wraps(pkg_tar_impl) """ + # Compatibility with older versions of pkg_tar that define files as # a flat list of labels. if "srcs" not in kwargs: diff --git a/third_party/rules_pkg-0.9.1/pkg/private/tar/tar_writer.py b/third_party/rules_pkg-1.0.1/pkg/private/tar/tar_writer.py similarity index 91% rename from third_party/rules_pkg-0.9.1/pkg/private/tar/tar_writer.py rename to third_party/rules_pkg-1.0.1/pkg/private/tar/tar_writer.py index 1cef38fa..cee232da 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/tar/tar_writer.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/tar/tar_writer.py @@ -46,6 +46,8 @@ def __init__(self, name, compression='', compressor='', + create_parents=False, + allow_dups_from_deps=True, default_mtime=None, preserve_tar_mtimes=True): """TarFileWriter wraps tarfile.open(). @@ -87,7 +89,7 @@ def __init__(self, # The Tarfile class doesn't allow us to specify gzip's mtime attribute. # Instead, we manually reimplement gzopen from tarfile.py and set mtime. self.fileobj = gzip.GzipFile( - filename=name, mode='w', compresslevel=9, mtime=self.default_mtime) + filename=name, mode='w', compresslevel=6, mtime=self.default_mtime) self.compressor_proc = None if self.compressor_cmd: mode = 'w|' @@ -106,6 +108,8 @@ def __init__(self, # we can adjust that here based on the setting of root_dirctory. 
self.directories.add('/') self.directories.add('./') + self.create_parents = create_parents + self.allow_dups_from_deps = allow_dups_from_deps def __enter__(self): return self @@ -123,14 +127,15 @@ def _addfile(self, info, fileobj=None): # Enforce the ending / for directories so we correctly deduplicate. if not info.name.endswith('/'): info.name += '/' - if not self._have_added(info.name): - self.tar.addfile(info, fileobj) - self.members.add(info.name) - if info.type == tarfile.DIRTYPE: - self.directories.add(info.name) - elif info.type != tarfile.DIRTYPE: - print('Duplicate file in archive: %s, ' - 'picking first occurrence' % info.name) + if not self.allow_dups_from_deps and self._have_added(info.name): + print('Duplicate file in archive: %s, ' + 'picking first occurrence' % info.name) + return + + self.tar.addfile(info, fileobj) + self.members.add(info.name) + if info.type == tarfile.DIRTYPE: + self.directories.add(info.name) def add_directory_path(self, path, @@ -152,7 +157,7 @@ def add_directory_path(self, mode: unix permission mode of the file, default: 0755. 
""" assert path[-1] == '/' - if not path or self._have_added(path): + if not path: return if _DEBUG_VERBOSITY > 1: print('DEBUG: adding directory', path) @@ -166,12 +171,14 @@ def add_directory_path(self, tarinfo.gname = gname self._addfile(tarinfo) - def add_parents(self, path, uid=0, gid=0, uname='', gname='', mtime=0, mode=0o755): + def conditionally_add_parents(self, path, uid=0, gid=0, uname='', gname='', mtime=0, mode=0o755): dirs = path.split('/') parent_path = '' for next_level in dirs[0:-1]: parent_path = parent_path + next_level + '/' - self.add_directory_path( + + if self.create_parents and not self._have_added(parent_path): + self.add_directory_path( parent_path, uid=uid, gid=gid, @@ -212,14 +219,14 @@ def add_file(self, return if name == '.': return - if name in self.members: + if not self.allow_dups_from_deps and name in self.members: return if mtime is None: mtime = self.default_mtime # Make directories up the file - self.add_parents(name, mtime=mtime, mode=0o755, uid=uid, gid=gid, uname=uname, gname=gname) + self.conditionally_add_parents(name, mtime=mtime, mode=0o755, uid=uid, gid=gid, uname=uname, gname=gname) tarinfo = tarfile.TarInfo(name) tarinfo.mtime = mtime @@ -291,7 +298,7 @@ def add_tar(self, if prefix: in_name = os.path.normpath(prefix + in_name).replace(os.path.sep, '/') tarinfo.name = in_name - self.add_parents( + self.conditionally_add_parents( path=tarinfo.name, mtime=tarinfo.mtime, mode=0o755, diff --git a/third_party/rules_pkg-0.9.1/pkg/private/util.bzl b/third_party/rules_pkg-1.0.1/pkg/private/util.bzl similarity index 85% rename from third_party/rules_pkg-0.9.1/pkg/private/util.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/util.bzl index b51598e1..7225456b 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/util.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/private/util.bzl @@ -29,10 +29,7 @@ def setup_output_files(ctx, package_file_name = None, default_output_file = None Callers should: - write to `output_file` - add 
`outputs` to their returned `DefaultInfo(files)` provider - - return a `PackageArtifactInfo` provider of the form: - label: `ctx.label.name` - file: `output_file` - file_name: `output_name` + - Possibly add a distinguishing element to OutputGroups Args: ctx: rule context @@ -85,5 +82,14 @@ def substitute_package_variables(ctx, attribute_value): # Map $(var) to {x} and then use format for substitution. # This is brittle and I hate it. We should have template substitution - # in the Starlark runtime. - return attribute_value.replace("$(", "{").replace(")", "}").format(**vars) + # in the Starlark runtime. This loop compensates for mismatched counts + # of $(foo) so that we don't try replace things like (bar) because we + # have no regex matching + for _ in range(attribute_value.count("$(")): + if attribute_value.find(")") == -1: + fail("mismatched variable declaration") + + attribute_value = attribute_value.replace("$(", "{", 1) + attribute_value = attribute_value.replace(")", "}", 1) + + return attribute_value.format(**vars) \ No newline at end of file diff --git a/third_party/rules_pkg-0.9.1/pkg/private/zip/BUILD b/third_party/rules_pkg-1.0.1/pkg/private/zip/BUILD similarity index 94% rename from third_party/rules_pkg-0.9.1/pkg/private/zip/BUILD rename to third_party/rules_pkg-1.0.1/pkg/private/zip/BUILD index eba47091..4823e90c 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/zip/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/private/zip/BUILD @@ -16,7 +16,7 @@ All interfaces are subject to change at any time. 
""" -load("@rules_python//python:defs.bzl", "py_library") +load("@rules_python//python:defs.bzl", "py_binary") package(default_applicable_licenses = ["//:license"]) @@ -48,6 +48,7 @@ exports_files( py_binary( name = "build_zip", srcs = ["build_zip.py"], + imports = ["../../.."], python_version = "PY3", srcs_version = "PY3", visibility = ["//visibility:public"], diff --git a/third_party/rules_pkg-0.9.1/pkg/private/zip/__init__.py b/third_party/rules_pkg-1.0.1/pkg/private/zip/__init__.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/private/zip/__init__.py rename to third_party/rules_pkg-1.0.1/pkg/private/zip/__init__.py diff --git a/third_party/rules_pkg-0.9.1/pkg/private/zip/build_zip.py b/third_party/rules_pkg-1.0.1/pkg/private/zip/build_zip.py similarity index 83% rename from third_party/rules_pkg-0.9.1/pkg/private/zip/build_zip.py rename to third_party/rules_pkg-1.0.1/pkg/private/zip/build_zip.py index 64c42060..c2ff3644 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/zip/build_zip.py +++ b/third_party/rules_pkg-1.0.1/pkg/private/zip/build_zip.py @@ -15,7 +15,9 @@ import argparse import datetime +import logging import os +import sys import zipfile from pkg.private import build_info @@ -24,9 +26,10 @@ ZIP_EPOCH = 315532800 # Unix dir bit and Windows dir bit. 
Magic from zip spec -UNIX_DIR_BIT = 0o40000 -MSDOS_DIR_BIT = 0x10 +UNIX_FILE_BIT = 0o100000 UNIX_SYMLINK_BIT = 0o120000 +UNIX_DIR_BIT = 0o040000 +MSDOS_DIR_BIT = 0x10 def _create_argument_parser(): """Creates the command line arg parser.""" @@ -46,6 +49,12 @@ def _create_argument_parser(): parser.add_argument( '-m', '--mode', help='The file system mode to use for files added into the zip.') + parser.add_argument( + '-c', '--compression_type', + help='The compression type to use') + parser.add_argument( + '-l', '--compression_level', + help='The compression level to use') parser.add_argument('--manifest', help='manifest of contents to add to the layer.', required=True) @@ -65,13 +74,13 @@ def _combine_paths(left, right): def parse_date(ts): - ts = datetime.datetime.utcfromtimestamp(ts) + ts = datetime.datetime.fromtimestamp(ts, tz=datetime.timezone.utc) return (ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second) class ZipWriter(object): - def __init__(self, output_path: str, time_stamp: int, default_mode: int): + def __init__(self, output_path: str, time_stamp: int, default_mode: int, compression_type: str, compression_level: int): """Create a writer. You must close() after use or use in a 'with' statement. 
@@ -84,7 +93,15 @@ def __init__(self, output_path: str, time_stamp: int, default_mode: int): self.output_path = output_path self.time_stamp = time_stamp self.default_mode = default_mode - self.zip_file = zipfile.ZipFile(self.output_path, mode='w') + compressions = { + "deflated": zipfile.ZIP_DEFLATED, + "lzma": zipfile.ZIP_LZMA, + "bzip2": zipfile.ZIP_BZIP2, + "stored": zipfile.ZIP_STORED + } + self.compression_type = compressions[compression_type] + self.compression_level = compression_level + self.zip_file = zipfile.ZipFile(self.output_path, mode='w', compression=self.compression_type) def __enter__(self): return self @@ -96,6 +113,15 @@ def close(self): self.zip_file.close() self.zip_file = None + def writestr(self, entry_info, content: str, compresslevel: int): + if sys.version_info >= (3, 7): + self.zip_file.writestr(entry_info, content, compresslevel=compresslevel) + else: + # Python 3.6 and lower don't support compresslevel + self.zip_file.writestr(entry_info, content) + if compresslevel != 6: + logging.warn("Custom compresslevel is not supported with python < 3.7") + def make_zipinfo(self, path: str, mode: str): """Create a Zipinfo. @@ -134,17 +160,18 @@ def add_manifest_entry(self, entry): user = entry.user group = entry.group - # Use the pkg_tar mode/owner remaping as a fallback + # Use the pkg_tar mode/owner remapping as a fallback dst_path = dest.strip('/') if entry_type == manifest.ENTRY_IS_DIR and not dst_path.endswith('/'): dst_path += '/' entry_info = self.make_zipinfo(path=dst_path, mode=mode) if entry_type == manifest.ENTRY_IS_FILE: - entry_info.compress_type = zipfile.ZIP_DEFLATED + entry_info.compress_type = self.compression_type # Using utf-8 for the file names is for python <3.7 compatibility. 
+ entry_info.external_attr |= UNIX_FILE_BIT << 16 with open(src.encode('utf-8'), 'rb') as src_content: - self.zip_file.writestr(entry_info, src_content.read()) + self.writestr(entry_info, src_content.read(), compresslevel=self.compression_level) elif entry_type == manifest.ENTRY_IS_DIR: entry_info.compress_type = zipfile.ZIP_STORED # Set directory bits @@ -158,7 +185,7 @@ def add_manifest_entry(self, entry): elif entry_type == manifest.ENTRY_IS_TREE: self.add_tree(src, dst_path, mode) elif entry_type == manifest.ENTRY_IS_EMPTY_FILE: - entry_info.compress_type = zipfile.ZIP_DEFLATED + entry_info.compress_type = zipfile.ZIP_STORED self.zip_file.writestr(entry_info, '') else: raise Exception('Unknown type for manifest entry:', entry) @@ -213,9 +240,9 @@ def add_tree(self, tree_top: str, destpath: str, mode: int): else: f_mode = mode entry_info = self.make_zipinfo(path=path, mode=f_mode) - entry_info.compress_type = zipfile.ZIP_DEFLATED + entry_info.compress_type = self.compression_type with open(content_path, 'rb') as src: - self.zip_file.writestr(entry_info, src.read()) + self.writestr(entry_info, src.read(), compresslevel=self.compression_level) else: # Implicitly created directory dir_path = path @@ -251,6 +278,8 @@ def _load_manifest(prefix, manifest_path): mode = "0o755", user = None, group = None, + uid = None, + gid = None, origin = "parent directory of {}".format(manifest_map[dest].origin), ) @@ -264,10 +293,11 @@ def main(args): default_mode = None if args.mode: default_mode = int(args.mode, 8) + compression_level = int(args.compression_level) manifest = _load_manifest(args.directory, args.manifest) with ZipWriter( - args.output, time_stamp=ts, default_mode=default_mode) as zip_out: + args.output, time_stamp=ts, default_mode=default_mode, compression_type=args.compression_type, compression_level=compression_level) as zip_out: for entry in manifest: zip_out.add_manifest_entry(entry) diff --git a/third_party/rules_pkg-0.9.1/pkg/private/zip/zip.bzl 
b/third_party/rules_pkg-1.0.1/pkg/private/zip/zip.bzl similarity index 68% rename from third_party/rules_pkg-0.9.1/pkg/private/zip/zip.bzl rename to third_party/rules_pkg-1.0.1/pkg/private/zip/zip.bzl index d66e5362..5531bc26 100644 --- a/third_party/rules_pkg-0.9.1/pkg/private/zip/zip.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/private/zip/zip.bzl @@ -13,54 +13,57 @@ # limitations under the License. """Zip archive creation rule and associated logic.""" -load("//pkg:path.bzl", "compute_data_path", "dest_path") load( "//pkg:providers.bzl", - "PackageArtifactInfo", "PackageVariablesInfo", ) -load( - "//pkg/private:util.bzl", - "setup_output_files", - "substitute_package_variables", -) load( "//pkg/private:pkg_files.bzl", "add_label_list", + "create_mapping_context_from_ctx", "write_manifest", ) +load( + "//pkg/private:util.bzl", + "setup_output_files", + "substitute_package_variables", +) _stamp_condition = Label("//pkg/private:private_stamp_detect") def _pkg_zip_impl(ctx): - outputs, output_file, output_name = setup_output_files(ctx) + outputs, output_file, _ = setup_output_files(ctx) args = ctx.actions.args() args.add("-o", output_file.path) args.add("-d", substitute_package_variables(ctx, ctx.attr.package_dir)) args.add("-t", ctx.attr.timestamp) args.add("-m", ctx.attr.mode) + args.add("-c", str(ctx.attr.compression_type)) + args.add("-l", ctx.attr.compression_level) inputs = [] if ctx.attr.stamp == 1 or (ctx.attr.stamp == -1 and ctx.attr.private_stamp_detect): args.add("--stamp_from", ctx.version_file.path) inputs.append(ctx.version_file) - data_path = compute_data_path(ctx, ctx.attr.strip_prefix) - data_path_without_prefix = compute_data_path(ctx, ".") - - content_map = {} # content handled in the manifest - file_deps = [] # list of Depsets needed by srcs - add_label_list(ctx, content_map, file_deps, srcs = ctx.attr.srcs) + mapping_context = create_mapping_context_from_ctx( + ctx, + label = ctx.label, + include_runfiles = ctx.attr.include_runfiles, + strip_prefix 
= ctx.attr.strip_prefix, + default_mode = ctx.attr.mode, + ) + add_label_list(mapping_context, srcs = ctx.attr.srcs) manifest_file = ctx.actions.declare_file(ctx.label.name + ".manifest") inputs.append(manifest_file) - write_manifest(ctx, manifest_file, content_map) + write_manifest(ctx, manifest_file, mapping_context.content_map) args.add("--manifest", manifest_file.path) args.set_param_file_format("multiline") args.use_param_file("@%s") - all_inputs = depset(direct = inputs, transitive = file_deps) + all_inputs = depset(direct = inputs, transitive = mapping_context.file_deps) ctx.actions.run( mnemonic = "PackageZip", @@ -81,11 +84,6 @@ def _pkg_zip_impl(ctx): files = depset([output_file]), runfiles = ctx.runfiles(files = outputs), ), - PackageArtifactInfo( - label = ctx.label.name, - file = output_file, - file_name = output_name, - ), ] pkg_zip_impl = rule( @@ -101,10 +99,14 @@ pkg_zip_impl = rule( default = "0555", ), "package_dir": attr.string( - doc = """The prefix to add to all all paths in the archive.""", + doc = """Prefix to be prepend to all paths written. +The name may contain variables, same as [package_file_name](#package_file_name)""", default = "/", ), "strip_prefix": attr.string(), + "include_runfiles": attr.bool( + doc = """See standard attributes.""", + ), "timestamp": attr.int( doc = """Time stamp to place on all files in the archive, expressed as seconds since the Unix Epoch, as per RFC 3339. The default is January 01, @@ -115,15 +117,25 @@ Jan 1, 1980 will be rounded up and the precision in the zip file is limited to a granularity of 2 seconds.""", default = 315532800, ), + "compression_level": attr.int( + default = 6, + doc = "The compression level to use, 1 is the fastest, 9 gives the smallest results. 0 skips compression, depending on the method used", + ), + "compression_type": attr.string( + default = "deflated", + doc = """The compression to use. Note that lzma and bzip2 might not be supported by all readers. 
+The list of compressions is the same as Python's ZipFile: https://docs.python.org/3/library/zipfile.html#zipfile.ZIP_STORED""", + values = ["deflated", "lzma", "bzip2", "stored"], + ), # Common attributes "out": attr.output( doc = """output file name. Default: name + ".zip".""", mandatory = True, ), - "package_file_name": attr.string(doc = "See Common Attributes"), + "package_file_name": attr.string(doc = "See [Common Attributes](#package_file_name)"), "package_variables": attr.label( - doc = "See Common Attributes", + doc = "See [Common Attributes](#package_variables)", providers = [PackageVariablesInfo], ), "stamp": attr.int( @@ -134,7 +146,14 @@ limited to a granularity of 2 seconds.""", """, default = 0, ), - + "allow_duplicates_with_different_content": attr.bool( + default = True, + doc = """If true, will allow you to reference multiple pkg_* which conflict +(writing different content or metadata to the same destination). +Such behaviour is always incorrect, but we provide a flag to support it in case old +builds were accidentally doing it. Never explicitly set this to true for new code. +""", + ), # Is --stamp set on the command line? # TODO(https://github.com/bazelbuild/rules_pkg/issues/340): Remove this. "private_stamp_detect": attr.bool(default = False), @@ -147,7 +166,6 @@ limited to a granularity of 2 seconds.""", allow_files = True, ), }, - provides = [PackageArtifactInfo], ) def pkg_zip(name, out = None, **kwargs): @@ -156,7 +174,9 @@ def pkg_zip(name, out = None, **kwargs): @wraps(pkg_zip_impl) Args: - out: output file name. Default: name + ".zip". + name: name + out: output file name. Default: name + ".zip". 
+ **kwargs: the rest """ if not out: out = name + ".zip" diff --git a/third_party/rules_pkg-0.9.1/pkg/providers.bzl b/third_party/rules_pkg-1.0.1/pkg/providers.bzl similarity index 98% rename from third_party/rules_pkg-0.9.1/pkg/providers.bzl rename to third_party/rules_pkg-1.0.1/pkg/providers.bzl index 459a0643..6d5fc271 100644 --- a/third_party/rules_pkg-0.9.1/pkg/providers.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/providers.bzl @@ -37,7 +37,7 @@ PackageFilesInfo = provider( Keys are strings representing attribute identifiers, values are arbitrary data structures that represent the associated data. These are -most often strings, but are not explicity defined. +most often strings, but are not explicitly defined. For known attributes and data type expectations, see the Common Attributes documentation in the `rules_pkg` reference. diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/BUILD b/third_party/rules_pkg-1.0.1/pkg/releasing/BUILD similarity index 94% rename from third_party/rules_pkg-0.9.1/pkg/releasing/BUILD rename to third_party/rules_pkg-1.0.1/pkg/releasing/BUILD index 2a38667a..4036c0fd 100644 --- a/third_party/rules_pkg-0.9.1/pkg/releasing/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/releasing/BUILD @@ -1,5 +1,4 @@ load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test") -load("git.bzl", "git_changelog") package( default_applicable_licenses = ["//:license"], @@ -19,8 +18,7 @@ package( filegroup( name = "standard_package", - srcs = glob([ - "BUILD", + srcs = ["BUILD"] + glob([ "*.bzl", "*.py", ]), @@ -36,6 +34,7 @@ py_library( "__init__.py", "release_tools.py", ], + imports = ["../.."], srcs_version = "PY3", ) @@ -44,6 +43,7 @@ py_binary( srcs = [ "print_rel_notes.py", ], + imports = ["../.."], python_version = "PY3", deps = [ ":release_utils", diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/__init__.py b/third_party/rules_pkg-1.0.1/pkg/releasing/__init__.py similarity index 100% rename from 
third_party/rules_pkg-0.9.1/pkg/releasing/__init__.py rename to third_party/rules_pkg-1.0.1/pkg/releasing/__init__.py diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/defs.bzl b/third_party/rules_pkg-1.0.1/pkg/releasing/defs.bzl similarity index 95% rename from third_party/rules_pkg-0.9.1/pkg/releasing/defs.bzl rename to third_party/rules_pkg-1.0.1/pkg/releasing/defs.bzl index 9f381a64..52f3cd49 100644 --- a/third_party/rules_pkg-0.9.1/pkg/releasing/defs.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/releasing/defs.bzl @@ -1,5 +1,6 @@ -"""Impementation for print_rel_notes.""" +"""Implementation for print_rel_notes.""" +# buildifier: disable=function-docstring def print_rel_notes( name, repo, @@ -14,6 +15,7 @@ def print_rel_notes( mirror_host = None): if not artifact_name: artifact_name = ":%s-%s.tar.gz" % (repo, version) + # Must use Label to get a path relative to the rules_pkg repository, # instead of the calling BUILD file. print_rel_notes_helper = Label("//pkg/releasing:print_rel_notes") @@ -33,6 +35,7 @@ def print_rel_notes( cmd.append("--toolchains_method=%s" % toolchains_method) if changelog: cmd.append("--changelog=$(location %s)" % changelog) + # We should depend on a changelog as a tool so that it is always built # for the host configuration. 
If the changelog is generated on the fly, # then we would have to run commands against our revision control diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/git.bzl b/third_party/rules_pkg-1.0.1/pkg/releasing/git.bzl similarity index 94% rename from third_party/rules_pkg-0.9.1/pkg/releasing/git.bzl rename to third_party/rules_pkg-1.0.1/pkg/releasing/git.bzl index 2952e6ce..f37e6c6a 100644 --- a/third_party/rules_pkg-0.9.1/pkg/releasing/git.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/releasing/git.bzl @@ -26,9 +26,9 @@ def _git_changelog_impl(ctx): if toolchain.path: args.add("--git_path", toolchain.path) else: - executable = toolchain.label.files_to_run.executable + executable = toolchain.label[DefaultInfo].files_to_run.executable tools.append(executable) - tools.append(toolchain.label.default_runfiles.files.to_list()) + tools.append(toolchain.label[DefaultInfo].default_runfiles.files.to_list()) args.add("--git_path", executable.path) args.add("--git_root", toolchain.client_top) args.add("--from_ref", ctx.attr.from_ref) @@ -83,7 +83,6 @@ _git_changelog = rule( toolchains = ["@rules_pkg//toolchains/git:git_toolchain_type"], ) - def git_changelog(name, **kwargs): _git_changelog( name = name, @@ -93,5 +92,5 @@ def git_changelog(name, **kwargs): str(Label("//toolchains/git:have_git")): [], "//conditions:default": ["//:not_compatible"], }), - **kwargs, + **kwargs ) diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/git_changelog_private.py b/third_party/rules_pkg-1.0.1/pkg/releasing/git_changelog_private.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/releasing/git_changelog_private.py rename to third_party/rules_pkg-1.0.1/pkg/releasing/git_changelog_private.py diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/print_rel_notes.py b/third_party/rules_pkg-1.0.1/pkg/releasing/print_rel_notes.py similarity index 97% rename from third_party/rules_pkg-0.9.1/pkg/releasing/print_rel_notes.py rename to 
third_party/rules_pkg-1.0.1/pkg/releasing/print_rel_notes.py index 7e1c29f8..379d0c05 100644 --- a/third_party/rules_pkg-0.9.1/pkg/releasing/print_rel_notes.py +++ b/third_party/rules_pkg-1.0.1/pkg/releasing/print_rel_notes.py @@ -46,6 +46,12 @@ def print_notes(org, repo, version, tarball_path, mirror_host=None, **Change Log** ${changelog} + **MODULE.bazel setup** + + ``` + bazel_dep(name = "${repo}", version = "${version}") + ``` + **WORKSPACE setup** ``` diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/release_tools.py b/third_party/rules_pkg-1.0.1/pkg/releasing/release_tools.py similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/releasing/release_tools.py rename to third_party/rules_pkg-1.0.1/pkg/releasing/release_tools.py diff --git a/third_party/rules_pkg-0.9.1/pkg/releasing/release_tools_test.py b/third_party/rules_pkg-1.0.1/pkg/releasing/release_tools_test.py similarity index 98% rename from third_party/rules_pkg-0.9.1/pkg/releasing/release_tools_test.py rename to third_party/rules_pkg-1.0.1/pkg/releasing/release_tools_test.py index 7ca80d1d..7b2b443c 100644 --- a/third_party/rules_pkg-0.9.1/pkg/releasing/release_tools_test.py +++ b/third_party/rules_pkg-1.0.1/pkg/releasing/release_tools_test.py @@ -14,7 +14,7 @@ import unittest -import release_tools +from pkg.releasing import release_tools class ReleaseToolsTest(unittest.TestCase): diff --git a/third_party/rules_pkg-0.9.1/pkg/rpm.bzl b/third_party/rules_pkg-1.0.1/pkg/rpm.bzl similarity index 81% rename from third_party/rules_pkg-0.9.1/pkg/rpm.bzl rename to third_party/rules_pkg-1.0.1/pkg/rpm.bzl index ea5df1cc..c9d9e3b7 100644 --- a/third_party/rules_pkg-0.9.1/pkg/rpm.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/rpm.bzl @@ -29,10 +29,12 @@ The mechanism for choosing between the two is documented in the function itself. 
""" +load("//pkg:rpm_pfg.bzl", _pkg_sub_rpm = "pkg_sub_rpm", pkg_rpm_pfg = "pkg_rpm") load("//pkg/legacy:rpm.bzl", pkg_rpm_legacy = "pkg_rpm") -load("//pkg:rpm_pfg.bzl", pkg_rpm_pfg = "pkg_rpm") -def pkg_rpm(name, srcs = None, spec_file = None, **kwargs): +pkg_sub_rpm = _pkg_sub_rpm + +def pkg_rpm(name, srcs = None, spec_file = None, subrpms = None, **kwargs): """pkg_rpm wrapper This rule selects between the two implementations of pkg_rpm as described in @@ -47,20 +49,24 @@ def pkg_rpm(name, srcs = None, spec_file = None, **kwargs): name: rule name srcs: pkg_rpm_pfg `srcs` attribute spec_file: pkg_rpm_legacy `spec_file` attribute - **kwargs: arguments to eihter `pkg_rpm_pfg` or `pkg_rpm_legacy`, + **kwargs: arguments to either `pkg_rpm_pfg` or `pkg_rpm_legacy`, depending on mode """ - if srcs and spec_file: + if srcs != None and spec_file: fail("Cannot determine which pkg_rpm rule to use. `srcs` and `spec_file` are mutually exclusive") - if not srcs and not spec_file: + if subrpms and spec_file: + fail("Cannot build sub RPMs with a specfile. 
`subrpms` and `spec_file` are mutually exclusive") + + if srcs == None and not spec_file: fail("Either `srcs` or `spec_file` must be provided.") - if srcs: + if srcs != None: pkg_rpm_pfg( name = name, srcs = srcs, + subrpms = subrpms, **kwargs ) elif spec_file: diff --git a/third_party/rules_pkg-0.9.1/pkg/rpm/BUILD b/third_party/rules_pkg-1.0.1/pkg/rpm/BUILD similarity index 97% rename from third_party/rules_pkg-0.9.1/pkg/rpm/BUILD rename to third_party/rules_pkg-1.0.1/pkg/rpm/BUILD index f7564b22..a2742f42 100644 --- a/third_party/rules_pkg-0.9.1/pkg/rpm/BUILD +++ b/third_party/rules_pkg-1.0.1/pkg/rpm/BUILD @@ -18,7 +18,6 @@ package(default_applicable_licenses = ["//:license"]) exports_files( glob([ - "*.bzl", "*.tpl", ]), visibility = ["//visibility:public"], @@ -27,7 +26,6 @@ exports_files( filegroup( name = "standard_package", srcs = glob([ - "*.bzl", "*.py", "*.tpl", ]) + [ diff --git a/third_party/rules_pkg-0.9.1/pkg/rpm/augment_rpm_files_install.py b/third_party/rules_pkg-1.0.1/pkg/rpm/augment_rpm_files_install.py similarity index 90% rename from third_party/rules_pkg-0.9.1/pkg/rpm/augment_rpm_files_install.py rename to third_party/rules_pkg-1.0.1/pkg/rpm/augment_rpm_files_install.py index 35bf729d..6f0762bb 100644 --- a/third_party/rules_pkg-0.9.1/pkg/rpm/augment_rpm_files_install.py +++ b/third_party/rules_pkg-1.0.1/pkg/rpm/augment_rpm_files_install.py @@ -22,10 +22,14 @@ import sys import json -# NOTE: Keep this in sync with the same variable in rpm.bzl +# NOTE: Keep those two in sync with the same variables in rpm_pfg.bzl _INSTALL_FILE_STANZA_FMT = """ -install -d %{{buildroot}}/$(dirname {1}) -cp {0} %{{buildroot}}/{1} +install -d "%{{buildroot}}/$(dirname '{1}')" +cp '{0}' '%{{buildroot}}/{1}' +""".strip() + +_FILE_MODE_STANZA_FMT = """ +{0} "{1}" """.strip() # Cheapo arg parsing. Currently this script is single-purpose. 
@@ -69,9 +73,7 @@ os.path.join(root, f), full_dest )) - dir_files_segments.append( - d["tags"] + " " + full_dest - ) + dir_files_segments.append(_FILE_MODE_STANZA_FMT.format(d["tags"], full_dest)) with open(existing_install_script_path, 'r') as fh: existing_install_script = fh.read() diff --git a/third_party/rules_pkg-0.9.1/pkg/rpm/template.spec.tpl b/third_party/rules_pkg-1.0.1/pkg/rpm/template.spec.tpl similarity index 85% rename from third_party/rules_pkg-0.9.1/pkg/rpm/template.spec.tpl rename to third_party/rules_pkg-1.0.1/pkg/rpm/template.spec.tpl index ca2aa46b..839a48d5 100644 --- a/third_party/rules_pkg-0.9.1/pkg/rpm/template.spec.tpl +++ b/third_party/rules_pkg-1.0.1/pkg/rpm/template.spec.tpl @@ -18,3 +18,9 @@ ${POST_SCRIPTLET} ${PREUN_SCRIPTLET} ${POSTUN_SCRIPTLET} + +${POSTTRANS_SCRIPTLET} + +${SUBRPMS} + +${CHANGELOG} diff --git a/third_party/rules_pkg-0.9.1/pkg/rpm_pfg.bzl b/third_party/rules_pkg-1.0.1/pkg/rpm_pfg.bzl similarity index 59% rename from third_party/rules_pkg-0.9.1/pkg/rpm_pfg.bzl rename to third_party/rules_pkg-1.0.1/pkg/rpm_pfg.bzl index 1e3450c1..cf6cb6bb 100644 --- a/third_party/rules_pkg-0.9.1/pkg/rpm_pfg.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/rpm_pfg.bzl @@ -27,27 +27,55 @@ find_system_rpmbuild(name="rules_pkg_rpmbuild") load( "//pkg:providers.bzl", - "PackageArtifactInfo", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo", "PackageVariablesInfo", ) -load("//pkg/private:util.bzl", "setup_output_files") +load("//pkg/private:util.bzl", "setup_output_files", "substitute_package_variables") rpm_filetype = [".rpm"] spec_filetype = [".spec", ".spec.in", ".spec.tpl"] +PackageSubRPMInfo = provider( + doc = """Provider representing a sub-RPM that can be built as part of a larger RPM""", + fields = { + "package_name": "name of the subpackage", + "summary": "RPM subpackage `Summary` tag", + "group": "RPM subpackage `Group` tag", + "description": "Multi-line description of this subpackage", + 
"post_scriptlet": "RPM `$post` scriplet for this subpackage", + "architecture": "Subpackage architecture", + "epoch": "RPM `Epoch` tag for this subpackage", + "version": "RPM `Version` tag for this subpackage", + "requires": "List of RPM capability expressions that this package requires", + "provides": "List of RPM capability expressions that this package provides", + "conflicts": "List of RPM capability expressions that conflict with this package", + "obsoletes": "List of RPM capability expressions that this package obsoletes", + "srcs": "Mapping groups to include in this RPM", + }, +) + +# default mode for %files +DEFAULT_FILE_MODE = "%defattr(-,root,root)" + # TODO(nacl): __install, __cp # {0} is the source, {1} is the dest # # TODO(nacl, #292): cp -r does not do the right thing with TreeArtifacts _INSTALL_FILE_STANZA_FMT = """ -install -d %{{buildroot}}/$(dirname {1}) -cp {0} %{{buildroot}}/{1} -""" +install -d "%{{buildroot}}/$(dirname '{1}')" +cp '{0}' '%{{buildroot}}/{1}' +chmod +w '%{{buildroot}}/{1}' +""".strip() + +_INSTALL_FILE_STANZA_FMT_FEDORA40_DEBUGINFO = """ +install -d "%{{buildroot}}/$(dirname '{1}')" +cp '../{0}' '%{{buildroot}}/{1}' +chmod +w '%{{buildroot}}/{1}' +""".strip() # TODO(nacl): __install # {0} is the directory name @@ -55,8 +83,8 @@ cp {0} %{{buildroot}}/{1} # This may not be strictly necessary, given that they'll be created in the # CPIO when rpmbuild processes the `%files` list. _INSTALL_DIR_STANZA_FMT = """ -install -d %{{buildroot}}/{0} -""" +install -d '%{{buildroot}}/{0}' +""".strip() # {0} is the name of the link, {1} is the target, {2} is the desired symlink "mode". # @@ -74,12 +102,17 @@ install -d %{{buildroot}}/{0} # XXX: This may not apply all that well to users of cygwin and mingw. We'll # deal with that when the time comes. 
_INSTALL_SYMLINK_STANZA_FMT = """ -%{{__install}} -d %{{buildroot}}/$(dirname {0}) -%{{__ln_s}} {1} %{{buildroot}}/{0} +%{{__install}} -d "%{{buildroot}}/$(dirname '{0}')" +%{{__ln_s}} '{1}' '%{{buildroot}}/{0}' %if "%_host_os" != "linux" - %{{__chmod}} -h {2} %{{buildroot}}/{0} + %{{__chmod}} -h {2} '%{{buildroot}}/{0}' %endif -""" +""".strip() + +# {0} is the file tag, {1} is the the path to file +_FILE_MODE_STANZA_FMT = """ +{0} "{1}" +""".strip() def _package_contents_metadata(origin_label, grouping_label): """Named construct for helping to identify conflicting packaged contents""" @@ -142,25 +175,41 @@ def _make_absolute_if_not_already_or_is_macro(path): # this can be inlined easily. return path if path.startswith(("/", "%")) else "/" + path -#### Input processing helper functons. +def _make_rpm_filename(rpm_name, version, architecture, package_name=None, release=None): + prefix = "%s-%s" + items = [rpm_name, version] + + if package_name: + prefix += "-%s" + items = [rpm_name, package_name, version] + + if release: + prefix += "-%s" + items += [release] + + fmt = prefix + ".%s.rpm" + + return fmt % tuple(items + [architecture]) + +#### Input processing helper functions. # TODO(nacl, #459): These are redundant with functions and structures in # pkg/private/pkg_files.bzl. We should really use the infrastructure provided # there, but as of writing, it's not quite ready. 
-def _process_files(pfi, origin_label, grouping_label, file_base, dest_check_map, packaged_directories, rpm_files_list, install_script_pieces): +def _process_files(pfi, origin_label, grouping_label, file_base, rpm_ctx, debuginfo_type): for dest, src in pfi.dest_src_map.items(): metadata = _package_contents_metadata(origin_label, grouping_label) - if dest in dest_check_map: - _conflicting_contents_error(dest, metadata, dest_check_map[dest]) + if dest in rpm_ctx.dest_check_map: + _conflicting_contents_error(dest, metadata, rpm_ctx.dest_check_map[dest]) else: - dest_check_map[dest] = metadata + rpm_ctx.dest_check_map[dest] = metadata abs_dest = _make_absolute_if_not_already_or_is_macro(dest) if src.is_directory: # Set aside TreeArtifact information for external processing # # @unsorted-dict-items - packaged_directories.append({ + rpm_ctx.packaged_directories.append({ "src": src, "dest": abs_dest, # This doesn't exactly make it extensible, but it saves @@ -170,56 +219,254 @@ def _process_files(pfi, origin_label, grouping_label, file_base, dest_check_map, }) else: # Files are well-known. Take care of them right here. 
- rpm_files_list.append(file_base + " " + abs_dest) - install_script_pieces.append(_INSTALL_FILE_STANZA_FMT.format( + rpm_ctx.rpm_files_list.append(_FILE_MODE_STANZA_FMT.format(file_base, abs_dest)) + + install_stanza_fmt = _INSTALL_FILE_STANZA_FMT + if debuginfo_type == "fedora40": + install_stanza_fmt = _INSTALL_FILE_STANZA_FMT_FEDORA40_DEBUGINFO + + rpm_ctx.install_script_pieces.append(install_stanza_fmt.format( src.path, abs_dest, )) -def _process_dirs(pdi, origin_label, grouping_label, file_base, dest_check_map, packaged_directories, rpm_files_list, install_script_pieces): +def _process_dirs(pdi, origin_label, grouping_label, file_base, rpm_ctx): for dest in pdi.dirs: metadata = _package_contents_metadata(origin_label, grouping_label) - if dest in dest_check_map: - _conflicting_contents_error(dest, metadata, dest_check_map[dest]) + if dest in rpm_ctx.dest_check_map: + _conflicting_contents_error(dest, metadata, rpm_ctx.dest_check_map[dest]) else: - dest_check_map[dest] = metadata + rpm_ctx.dest_check_map[dest] = metadata abs_dirname = _make_absolute_if_not_already_or_is_macro(dest) - rpm_files_list.append(file_base + " " + abs_dirname) + rpm_ctx.rpm_files_list.append(_FILE_MODE_STANZA_FMT.format(file_base, abs_dirname)) - install_script_pieces.append(_INSTALL_DIR_STANZA_FMT.format( + rpm_ctx.install_script_pieces.append(_INSTALL_DIR_STANZA_FMT.format( abs_dirname, )) -def _process_symlink(psi, origin_label, grouping_label, file_base, dest_check_map, packaged_directories, rpm_files_list, install_script_pieces): +def _process_symlink(psi, origin_label, grouping_label, file_base, rpm_ctx): metadata = _package_contents_metadata(origin_label, grouping_label) - if psi.destination in dest_check_map: - _conflicting_contents_error(psi.destination, metadata, dest_check_map[psi.destination]) + if psi.destination in rpm_ctx.dest_check_map: + _conflicting_contents_error(psi.destination, metadata, rpm_ctx.dest_check_map[psi.destination]) else: - 
dest_check_map[psi.destination] = metadata + rpm_ctx.dest_check_map[psi.destination] = metadata abs_dest = _make_absolute_if_not_already_or_is_macro(psi.destination) - rpm_files_list.append(file_base + " " + abs_dest) - install_script_pieces.append(_INSTALL_SYMLINK_STANZA_FMT.format( + rpm_ctx.rpm_files_list.append(_FILE_MODE_STANZA_FMT.format(file_base, abs_dest)) + rpm_ctx.install_script_pieces.append(_INSTALL_SYMLINK_STANZA_FMT.format( abs_dest, psi.target, psi.attributes["mode"], )) +def _process_dep(dep, rpm_ctx, debuginfo_type): + # NOTE: This does not detect cases where directories are not named + # consistently. For example, all of these may collide in reality, but + # won't be detected by the below: + # + # 1) usr/lib/libfoo.a + # 2) /usr/lib/libfoo.a + # 3) %{_libdir}/libfoo.a + # + # The most important thing, regardless of how these checks below are + # done, is to be consistent with path naming conventions. + # + # There is also an unsolved question of determining how to handle + # subdirectories of "PackageFilesInfo" targets that are actually + # directories. 
+ + # dep is a Target + if PackageFilesInfo in dep: + _process_files( + dep[PackageFilesInfo], + dep.label, # origin label + None, # group label + _make_filetags(dep[PackageFilesInfo].attributes), # file_base + rpm_ctx, + debuginfo_type, + ) + + if PackageDirsInfo in dep: + _process_dirs( + dep[PackageDirsInfo], + dep.label, # origin label + None, # group label + _make_filetags(dep[PackageDirsInfo].attributes, "%dir"), # file_base + rpm_ctx, + ) + + if PackageSymlinkInfo in dep: + _process_symlink( + dep[PackageSymlinkInfo], + dep.label, # origin label + None, # group label + _make_filetags(dep[PackageSymlinkInfo].attributes), # file_base + rpm_ctx, + ) + + if PackageFilegroupInfo in dep: + pfg_info = dep[PackageFilegroupInfo] + for entry, origin in pfg_info.pkg_files: + file_base = _make_filetags(entry.attributes) + _process_files( + entry, + origin, + dep.label, + file_base, + rpm_ctx, + debuginfo_type + ) + for entry, origin in pfg_info.pkg_dirs: + file_base = _make_filetags(entry.attributes, "%dir") + _process_dirs( + entry, + origin, + dep.label, + file_base, + rpm_ctx, + ) + + for entry, origin in pfg_info.pkg_symlinks: + file_base = _make_filetags(entry.attributes) + _process_symlink( + entry, + origin, + dep.label, + file_base, + rpm_ctx, + ) + +def _process_subrpm(ctx, rpm_name, rpm_info, rpm_ctx, debuginfo_type): + sub_rpm_ctx = struct( + dest_check_map = {}, + install_script_pieces = [], + packaged_directories = [], + rpm_files_list = [], + ) + + rpm_lines = [ + "%%package %s" % rpm_info.package_name, + "Summary: %s" % rpm_info.summary, + ] + + if rpm_info.architecture: + rpm_lines += ["BuildArch: %s" % rpm_info.architecture] + + if rpm_info.epoch: + rpm_lines += ["Epoch: %s" % rpm_info.epoch] + + if rpm_info.version: + rpm_lines += ["Version: %s" % rpm_info.version] + + for r in rpm_info.requires: + rpm_lines += ["Requires: %s" % r] + + for p in rpm_info.provides: + rpm_lines += ["Provides: %s" % p] + + for c in rpm_info.conflicts: + rpm_lines += 
["Conflicts: %s" % c] + + for o in rpm_info.obsoletes: + rpm_lines += ["Obsoletes: %s" % o] + + rpm_lines += [ + "", + "%%description %s" % rpm_info.package_name, + rpm_info.description, + ] + + if rpm_info.post_scriptlet: + rpm_lines += [ + "", + "%%post %s" % rpm_info.package_name, + ] + + if rpm_info.srcs: + rpm_lines += [ + "", + "%%files %s" % rpm_info.package_name, + ] + + for dep in rpm_info.srcs: + _process_dep(dep, sub_rpm_ctx, debuginfo_type) + + # rpmbuild will be unhappy if we have no files so we stick + # default file mode in for that scenario + rpm_lines += [DEFAULT_FILE_MODE] + rpm_lines += sub_rpm_ctx.rpm_files_list + + rpm_lines += [""] + + rpm_ctx.install_script_pieces.extend(sub_rpm_ctx.install_script_pieces) + rpm_ctx.packaged_directories.extend(sub_rpm_ctx.packaged_directories) + + package_file_name = _make_rpm_filename( + rpm_name = rpm_name, + version = rpm_info.version or ctx.attr.version, + architecture = rpm_info.architecture or ctx.attr.architecture, + package_name = rpm_info.package_name, + release = ctx.attr.release, + ) + + default_file = ctx.actions.declare_file("{}-{}.rpm".format(rpm_name, rpm_info.package_name)) + + _, output_file, _ = setup_output_files( + ctx, + package_file_name = package_file_name, + default_output_file = default_file, + ) + + rpm_ctx.output_rpm_files.append(output_file) + rpm_ctx.make_rpm_args.append("--subrpm_out_file=%s:%s" % ( + rpm_info.package_name, + output_file.path, + )) + + return rpm_lines + #### Rule implementation def _pkg_rpm_impl(ctx): """Implements the pkg_rpm rule.""" + rpm_ctx = struct( + # Ensure that no destinations collide. RPMs that fail this check may be + # correct, but the output may also create hard-to-debug issues. Better + # to err on the side of correctness here. 
+ dest_check_map = {}, + + # The contents of the "%install" scriptlet + install_script_pieces = [], + + # The list of entries in the "%files" list + rpm_files_list = [], + + # Directories (TreeArtifacts) are to be treated differently. + # Specifically, since Bazel does not know their contents at analysis + # time, processing them needs to be delegated to a helper script. This + # is done via the _treeartifact_helper script used later on. + packaged_directories = [], + + # RPM files we expect to generate + output_rpm_files = [], + + # Arguments that we pass to make_rpm.py + make_rpm_args = [], + ) + files = [] tools = [] - args = ["--name=" + ctx.label.name] + debuginfo_type = "none" + name = ctx.attr.package_name if ctx.attr.package_name else ctx.label.name + rpm_ctx.make_rpm_args.append("--name=" + name) if ctx.attr.debug: - args.append("--debug") + rpm_ctx.make_rpm_args.append("--debug") if ctx.attr.rpmbuild_path: - args.append("--rpmbuild=" + ctx.attr.rpmbuild_path) + rpm_ctx.make_rpm_args.append("--rpmbuild=" + ctx.attr.rpmbuild_path) # buildifier: disable=print print("rpmbuild_path is deprecated. 
See the README for instructions on how" + @@ -230,11 +477,15 @@ def _pkg_rpm_impl(ctx): fail("The rpmbuild_toolchain is not properly configured: " + toolchain.name) if toolchain.path: - args.append("--rpmbuild=" + toolchain.path) + rpm_ctx.make_rpm_args.append("--rpmbuild=" + toolchain.path) else: executable_files = toolchain.label[DefaultInfo].files_to_run tools.append(executable_files) - args.append("--rpmbuild=%s" % executable_files.executable.path) + rpm_ctx.make_rpm_args.append("--rpmbuild=%s" % executable_files.executable.path) + + if ctx.attr.debuginfo: + debuginfo_type = toolchain.debuginfo_type + rpm_ctx.make_rpm_args.append("--debuginfo_type=%s" % debuginfo_type) #### Calculate output file name # rpm_name takes precedence over name if provided @@ -247,19 +498,13 @@ def _pkg_rpm_impl(ctx): package_file_name = ctx.attr.package_file_name if not package_file_name: - package_file_name = "%s-%s-%s.%s.rpm" % ( + package_file_name = _make_rpm_filename( rpm_name, ctx.attr.version, - ctx.attr.release, ctx.attr.architecture, + release = ctx.attr.release, ) - outputs, output_file, output_name = setup_output_files( - ctx, - package_file_name = package_file_name, - default_output_file = default_file, - ) - #### rpm spec "preamble" preamble_pieces = [] @@ -270,8 +515,8 @@ def _pkg_rpm_impl(ctx): if ctx.attr.version: fail("Both version and version_file attributes were specified") - preamble_pieces.append("Version: ${VERSION_FROM_FILE}") - args.append("--version=@" + ctx.file.version_file.path) + preamble_pieces.append("Version: ${{VERSION_FROM_FILE}}") + rpm_ctx.make_rpm_args.append("--version=@" + ctx.file.version_file.path) files.append(ctx.file.version_file) elif ctx.attr.version: preamble_pieces.append("Version: " + ctx.attr.version) @@ -283,8 +528,8 @@ def _pkg_rpm_impl(ctx): if ctx.attr.release: fail("Both release and release_file attributes were specified") - preamble_pieces.append("Release: ${RELEASE_FROM_FILE}") - args.append("--release=@" + 
ctx.file.release_file.path) + preamble_pieces.append("Release: ${{RELEASE_FROM_FILE}}") + rpm_ctx.make_rpm_args.append("--release=@" + ctx.file.release_file.path) files.append(ctx.file.release_file) elif ctx.attr.release: preamble_pieces.append("Release: " + ctx.attr.release) @@ -300,11 +545,13 @@ def _pkg_rpm_impl(ctx): if ctx.attr.source_date_epoch_file: if ctx.attr.source_date_epoch >= 0: fail("Both source_date_epoch and source_date_epoch_file attributes were specified") - args.append("--source_date_epoch=@" + ctx.file.source_date_epoch_file.path) + rpm_ctx.make_rpm_args.append("--source_date_epoch=@" + ctx.file.source_date_epoch_file.path) files.append(ctx.file.source_date_epoch_file) elif ctx.attr.source_date_epoch >= 0: - args.append("--source_date_epoch=" + str(ctx.attr.source_date_epoch)) + rpm_ctx.make_rpm_args.append("--source_date_epoch=" + str(ctx.attr.source_date_epoch)) + if ctx.attr.epoch: + preamble_pieces.append("Epoch: " + ctx.attr.epoch) if ctx.attr.summary: preamble_pieces.append("Summary: " + ctx.attr.summary) if ctx.attr.url: @@ -317,6 +564,8 @@ def _pkg_rpm_impl(ctx): preamble_pieces.extend(["Provides: " + p for p in ctx.attr.provides]) if ctx.attr.conflicts: preamble_pieces.extend(["Conflicts: " + c for c in ctx.attr.conflicts]) + if ctx.attr.obsoletes: + preamble_pieces.extend(["Obsoletes: " + o for o in ctx.attr.obsoletes]) if ctx.attr.requires: preamble_pieces.extend(["Requires: " + r for r in ctx.attr.requires]) if ctx.attr.requires_contextual: @@ -343,10 +592,10 @@ def _pkg_rpm_impl(ctx): ) ctx.actions.write( output = preamble_file, - content = "\n".join(preamble_pieces), + content = substitute_package_variables(ctx, "\n".join(preamble_pieces)), ) files.append(preamble_file) - args.append("--preamble=" + preamble_file.path) + rpm_ctx.make_rpm_args.append("--preamble=" + preamble_file.path) #### %description @@ -366,7 +615,11 @@ def _pkg_rpm_impl(ctx): fail("None of the description or description_file attributes were specified") 
files.append(description_file) - args.append("--description=" + description_file.path) + rpm_ctx.make_rpm_args.append("--description=" + description_file.path) + + if ctx.attr.changelog: + files.append(ctx.file.changelog) + rpm_ctx.make_rpm_args.append("--changelog=" + ctx.file.changelog.path) #### Non-procedurally-generated scriptlets @@ -376,48 +629,60 @@ def _pkg_rpm_impl(ctx): fail("Both pre_scriptlet and pre_scriptlet_file attributes were specified") pre_scriptlet_file = ctx.file.pre_scriptlet_file files.append(pre_scriptlet_file) - args.append("--pre_scriptlet=" + pre_scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--pre_scriptlet=" + pre_scriptlet_file.path) elif ctx.attr.pre_scriptlet: scriptlet_file = ctx.actions.declare_file(ctx.label.name + ".pre_scriptlet") files.append(scriptlet_file) ctx.actions.write(scriptlet_file, ctx.attr.pre_scriptlet) - args.append("--pre_scriptlet=" + scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--pre_scriptlet=" + scriptlet_file.path) if ctx.attr.post_scriptlet_file: if ctx.attr.post_scriptlet: fail("Both post_scriptlet and post_scriptlet_file attributes were specified") post_scriptlet_file = ctx.file.post_scriptlet_file files.append(post_scriptlet_file) - args.append("--post_scriptlet=" + post_scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--post_scriptlet=" + post_scriptlet_file.path) elif ctx.attr.post_scriptlet: scriptlet_file = ctx.actions.declare_file(ctx.label.name + ".post_scriptlet") files.append(scriptlet_file) ctx.actions.write(scriptlet_file, ctx.attr.post_scriptlet) - args.append("--post_scriptlet=" + scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--post_scriptlet=" + scriptlet_file.path) if ctx.attr.preun_scriptlet_file: if ctx.attr.preun_scriptlet: fail("Both preun_scriptlet and preun_scriptlet_file attributes were specified") preun_scriptlet_file = ctx.file.preun_scriptlet_file files.append(preun_scriptlet_file) - args.append("--preun_scriptlet=" + preun_scriptlet_file.path) + 
rpm_ctx.make_rpm_args.append("--preun_scriptlet=" + preun_scriptlet_file.path) elif ctx.attr.preun_scriptlet: scriptlet_file = ctx.actions.declare_file(ctx.label.name + ".preun_scriptlet") files.append(scriptlet_file) ctx.actions.write(scriptlet_file, ctx.attr.preun_scriptlet) - args.append("--preun_scriptlet=" + scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--preun_scriptlet=" + scriptlet_file.path) if ctx.attr.postun_scriptlet_file: if ctx.attr.postun_scriptlet: fail("Both postun_scriptlet and postun_scriptlet_file attributes were specified") postun_scriptlet_file = ctx.file.postun_scriptlet_file files.append(postun_scriptlet_file) - args.append("--postun_scriptlet=" + postun_scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--postun_scriptlet=" + postun_scriptlet_file.path) elif ctx.attr.postun_scriptlet: scriptlet_file = ctx.actions.declare_file(ctx.label.name + ".postun_scriptlet") files.append(scriptlet_file) ctx.actions.write(scriptlet_file, ctx.attr.postun_scriptlet) - args.append("--postun_scriptlet=" + scriptlet_file.path) + rpm_ctx.make_rpm_args.append("--postun_scriptlet=" + scriptlet_file.path) + + if ctx.attr.posttrans_scriptlet_file: + if ctx.attr.posttrans_scriptlet: + fail("Both posttrans_scriptlet and posttrans_scriptlet_file attributes were specified") + posttrans_scriptlet_file = ctx.file.posttrans_scriptlet_file + files.append(posttrans_scriptlet_file) + rpm_ctx.make_rpm_args.append("--posttrans_scriptlet=" + posttrans_scriptlet_file.path) + elif ctx.attr.posttrans_scriptlet: + scriptlet_file = ctx.actions.declare_file(ctx.label.name + ".posttrans_scriptlet") + files.append(scriptlet_file) + ctx.actions.write(scriptlet_file, ctx.attr.posttrans_scriptlet) + rpm_ctx.make_rpm_args.append("--posttrans_scriptlet=" + scriptlet_file.path) #### Expand the spec file template; prepare data files @@ -427,38 +692,23 @@ def _pkg_rpm_impl(ctx): output = spec_file, substitutions = substitutions, ) - args.append("--spec_file=" + spec_file.path) + 
rpm_ctx.make_rpm_args.append("--spec_file=" + spec_file.path) files.append(spec_file) - args.append("--out_file=" + output_file.path) + # Add data files + files += ctx.files.srcs + ctx.files.subrpms - # Add data files. - if ctx.file.changelog: - files.append(ctx.file.changelog) - args.append(ctx.file.changelog.path) - - files += ctx.files.srcs - - #### Consistency checking; input processing + _, output_file, _ = setup_output_files( + ctx, + package_file_name = package_file_name, + default_output_file = default_file, + ) - # Ensure that no destinations collide. RPMs that fail this check may be - # correct, but the output may also create hard-to-debug issues. Better to - # err on the side of correctness here. - dest_check_map = {} + rpm_ctx.make_rpm_args.append("--out_file=" + output_file.path) + rpm_ctx.output_rpm_files.append(output_file) - # The contents of the "%install" scriptlet - install_script_pieces = [] if ctx.attr.debug: - install_script_pieces.append("set -x") - - # The list of entries in the "%files" list - rpm_files_list = [] - - # Directories (TreeArtifacts) are to be treated differently. Specifically, - # since Bazel does not know their contents at analysis time, processing them - # needs to be delegated to a helper script. This is done via the - # _treeartifact_helper script used later on. - packaged_directories = [] + rpm_ctx.install_script_pieces.append("set -x") # Iterate over all incoming data, checking for conflicts and creating # datasets as we go from the actual contents of the RPM. @@ -467,98 +717,47 @@ def _pkg_rpm_impl(ctx): # produce an installation script that is longer than necessary. A better # implementation would track directories that are created and ensure that # they aren't unnecessarily recreated. + for dep in ctx.attr.srcs: - # NOTE: This does not detect cases where directories are not named - # consistently. 
For example, all of these may collide in reality, but - # won't be detected by the below: - # - # 1) usr/lib/libfoo.a - # 2) /usr/lib/libfoo.a - # 3) %{_libdir}/libfoo.a - # - # The most important thing, regardless of how these checks below are - # done, is to be consistent with path naming conventions. - # - # There is also an unsolved question of determining how to handle - # subdirectories of "PackageFilesInfo" targets that are actually - # directories. + _process_dep(dep, rpm_ctx, debuginfo_type) - # dep is a Target - if PackageFilesInfo in dep: - _process_files( - dep[PackageFilesInfo], - dep.label, # origin label - None, # group label - _make_filetags(dep[PackageFilesInfo].attributes), # file_base - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) + #### subrpms + if ctx.attr.subrpms: + subrpm_lines = [] + for s in ctx.attr.subrpms: + subrpm_lines.extend(_process_subrpm( + ctx, rpm_name, s[PackageSubRPMInfo], rpm_ctx, debuginfo_type)) - if PackageDirsInfo in dep: - _process_dirs( - dep[PackageDirsInfo], - dep.label, # origin label - None, # group label - _make_filetags(dep[PackageDirsInfo].attributes, "%dir"), # file_base - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) + subrpm_file = ctx.actions.declare_file( + "{}.spec.subrpms".format(rpm_name), + ) + ctx.actions.write( + output = subrpm_file, + content = "\n".join(subrpm_lines), + ) + files.append(subrpm_file) + rpm_ctx.make_rpm_args.append("--subrpms=" + subrpm_file.path) - if PackageSymlinkInfo in dep: - _process_symlink( - dep[PackageSymlinkInfo], - dep.label, # origin label - None, # group label - _make_filetags(dep[PackageSymlinkInfo].attributes), # file_base - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) + if debuginfo_type != "none": + debuginfo_default_file = ctx.actions.declare_file( + "{}-debuginfo.rpm".format(rpm_name)) + debuginfo_package_file_name = _make_rpm_filename( + 
rpm_name, + ctx.attr.version, + ctx.attr.architecture, + package_name = "debuginfo", + release = ctx.attr.release, + ) + + _, debuginfo_output_file, _ = setup_output_files( + ctx, + debuginfo_package_file_name, + default_output_file = debuginfo_default_file, + ) - if PackageFilegroupInfo in dep: - pfg_info = dep[PackageFilegroupInfo] - for entry, origin in pfg_info.pkg_files: - file_base = _make_filetags(entry.attributes) - _process_files( - entry, - origin, - dep.label, - file_base, - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) - for entry, origin in pfg_info.pkg_dirs: - file_base = _make_filetags(entry.attributes, "%dir") - _process_dirs( - entry, - origin, - dep.label, - file_base, - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) - - for entry, origin in pfg_info.pkg_symlinks: - file_base = _make_filetags(entry.attributes) - _process_symlink( - entry, - origin, - dep.label, - file_base, - dest_check_map, - packaged_directories, - rpm_files_list, - install_script_pieces, - ) + rpm_ctx.output_rpm_files.append(debuginfo_output_file) + rpm_ctx.make_rpm_args.append( + "--subrpm_out_file=debuginfo:%s" % debuginfo_output_file.path ) #### Procedurally-generated scripts/lists (%install, %files) @@ -567,22 +766,23 @@ def _pkg_rpm_impl(ctx): install_script = ctx.actions.declare_file("{}.spec.install".format(rpm_name)) ctx.actions.write( install_script, - "\n".join(install_script_pieces), + "\n".join(rpm_ctx.install_script_pieces), ) rpm_files_file = ctx.actions.declare_file( "{}.spec.files".format(rpm_name), ) - ctx.actions.write( - rpm_files_file, - "\n".join(rpm_files_list), - ) + + # rpmbuild will be unhappy if we have no files so we stick + # default file mode in for that scenario + rpm_files_contents = [DEFAULT_FILE_MODE] + rpm_ctx.rpm_files_list + ctx.actions.write(rpm_files_file, "\n".join(rpm_files_contents)) # TreeArtifact processing work - if packaged_directories: + if 
rpm_ctx.packaged_directories: packaged_directories_file = ctx.actions.declare_file("{}.spec.packaged_directories.json".format(rpm_name)) - packaged_directories_inputs = [d["src"] for d in packaged_directories] + packaged_directories_inputs = [d["src"] for d in rpm_ctx.packaged_directories] # This isn't the prettiest thing in the world, but it works. Bazel # needs the "File" data to pass to the command, but "File"s cannot be @@ -591,10 +791,10 @@ def _pkg_rpm_impl(ctx): # This data isn't used outside of this block, so it's probably fine. # Cleaner code would separate the JSONable values from the File type (in # a struct, probably). - for d in packaged_directories: + for d in rpm_ctx.packaged_directories: d["src"] = d["src"].path - ctx.actions.write(packaged_directories_file, json.encode(packaged_directories)) + ctx.actions.write(packaged_directories_file, json.encode(rpm_ctx.packaged_directories)) # Overwrite all following uses of the install script and files lists to # use the ones generated below. @@ -622,10 +822,10 @@ def _pkg_rpm_impl(ctx): # And then we're done. Yay! files.append(install_script) - args.append("--install_script=" + install_script.path) + rpm_ctx.make_rpm_args.append("--install_script=" + install_script.path) files.append(rpm_files_file) - args.append("--file_list=" + rpm_files_file.path) + rpm_ctx.make_rpm_args.append("--file_list=" + rpm_files_file.path) #### Remaining setup @@ -636,10 +836,16 @@ def _pkg_rpm_impl(ctx): "_binary_payload {}".format(ctx.attr.binary_payload_compression), ]) - args.extend(["--rpmbuild_arg=" + a for a in additional_rpmbuild_args]) + for key, value in ctx.attr.defines.items(): + additional_rpmbuild_args.extend([ + "--define", + "{} {}".format(key, value), + ]) + + rpm_ctx.make_rpm_args.extend(["--rpmbuild_arg=" + a for a in additional_rpmbuild_args]) - for f in ctx.files.srcs: - args.append(f.path) + for f in ctx.files.srcs + ctx.files.subrpms: + rpm_ctx.make_rpm_args.append(f.path) #### Call the generator script. 
@@ -647,9 +853,9 @@ def _pkg_rpm_impl(ctx): mnemonic = "MakeRpm", executable = ctx.executable._make_rpm, use_default_shell_env = True, - arguments = args, + arguments = rpm_ctx.make_rpm_args, inputs = files, - outputs = [output_file], + outputs = rpm_ctx.output_rpm_files, env = { "LANG": "en_US.UTF-8", "LC_CTYPE": "UTF-8", @@ -660,23 +866,18 @@ def _pkg_rpm_impl(ctx): ) changes = [] - if ctx.attr.changelog: - changes = [ctx.attr.changelog] + if ctx.file.changelog: + changes = [ctx.file.changelog] output_groups = { "out": [default_file], - "rpm": [output_file], - "changes": changes + "rpm": rpm_ctx.output_rpm_files, + "changes": changes, } return [ OutputGroupInfo(**output_groups), DefaultInfo( - files = depset(outputs), - ), - PackageArtifactInfo( - file = output_file, - file_name = output_name, - label = ctx.label.name, + files = depset(rpm_ctx.output_rpm_files), ), ] @@ -710,6 +911,14 @@ pkg_rpm = rule( Is the equivalent to `%config(missingok, noreplace)` in the `%files` list. + This rule produces 2 artifacts: an .rpm and a .changes file. The DefaultInfo will + include both. If you need downstream rule to specifically depend on only the .rpm or + .changes file then you can use `filegroup` to select distinct output groups. + + **OutputGroupInfo** + - `out` the RPM or a symlink to the actual package. + - `rpm` the package with any precise file name created with `package_file_name`. + - `changes` the .changes file. """, # @unsorted-dict-items attrs = { @@ -734,6 +943,9 @@ pkg_rpm = rule( doc = "See 'Common Attributes' in the rules_pkg reference", providers = [PackageVariablesInfo], ), + "epoch": attr.string( + doc = """Optional; RPM "Epoch" tag.""", + ), "version": attr.string( doc = """RPM "Version" tag. @@ -901,12 +1113,22 @@ pkg_rpm = rule( doc = """File containing the RPM `%postun` scriptlet""", allow_single_file = True, ), + "posttrans_scriptlet": attr.string( + doc = """RPM `%posttrans` scriptlet. Currently only allowed to be a shell script. 
+ + `posttrans_scriptlet` and `posttrans_scriptlet_file` are mutually exclusive. + """, + ), + "posttrans_scriptlet_file": attr.label( + doc = """File containing the RPM `%posttrans` scriptlet""", + allow_single_file = True, + ), "conflicts": attr.string_list( doc = """List of capabilities that conflict with this package when it is installed. Corresponds to the "Conflicts" preamble tag. - See also: https://rpm.org/user_doc/dependencies.html + See also: https://rpm-software-management.github.io/rpm/manual/dependencies.html """, ), "provides": attr.string_list( @@ -914,7 +1136,15 @@ pkg_rpm = rule( Corresponds to the "Provides" preamble tag. - See also: https://rpm.org/user_doc/dependencies.html + See also: https://rpm-software-management.github.io/rpm/manual/dependencies.html + """, + ), + "obsoletes": attr.string_list( + doc = """List of rpm capability expressions that this package obsoletes. + + Corresponds to the "Obsoletes" preamble tag. + + See also: https://rpm-software-management.github.io/rpm/manual/dependencies.html """, ), "requires": attr.string_list( @@ -922,7 +1152,7 @@ pkg_rpm = rule( Corresponds to the "Requires" preamble tag. - See also: https://rpm.org/user_doc/dependencies.html + See also: https://rpm-software-management.github.io/rpm/manual/dependencies.html """, ), "requires_contextual": attr.string_list_dict( @@ -957,7 +1187,7 @@ pkg_rpm = rule( For capabilities that are always required by packages at runtime, use the `requires` attribute instead. - See also: https://rpm.org/user_doc/more_dependencies.html + See also: https://rpm-software-management.github.io/rpm/manual/more_dependencies.html NOTE: `pkg_rpm` does not check if the keys of this dictionary are acceptable to `rpm(8)`. @@ -998,6 +1228,30 @@ pkg_rpm = rule( overcommitting your system. 
""", ), + "defines": attr.string_dict( + doc = """Additional definitions to pass to rpmbuild""", + ), + "subrpms": attr.label_list( + doc = """Sub RPMs to build with this RPM + + A list of `pkg_sub_rpm` instances that can be used to create sub RPMs as part of the + overall package build. + + NOTE: use of `subrpms` is incompatible with the legacy `spec_file` mode + """, + providers = [ + [PackageSubRPMInfo], + ], + ), + "debuginfo": attr.bool( + doc = """Enable generation of debuginfo RPMs + + For supported platforms this will enable the generation of debuginfo RPMs adjacent + to the regular RPMs. Currently this is supported by Fedora 40, CentOS7 and + CentOS Stream 9. + """, + default = False, + ), "rpmbuild_path": attr.string( doc = """Path to a `rpmbuild` binary. Deprecated in favor of the rpmbuild toolchain""", ), @@ -1017,6 +1271,82 @@ pkg_rpm = rule( }, executable = False, implementation = _pkg_rpm_impl, - provides = [PackageArtifactInfo], toolchains = ["@rules_pkg//toolchains/rpm:rpmbuild_toolchain_type"], ) + +def _pkg_sub_rpm_impl(ctx): + mapped_files_depsets = [] + + for s in ctx.attr.srcs: + if PackageFilegroupInfo in s: + mapped_files_depsets.append(s[DefaultInfo].files) + + if PackageFilesInfo in s: + # dict.values() returns a list, not an iterator like in python3 + mapped_files_depsets.append(s[DefaultInfo].files) + + return [ + PackageSubRPMInfo( + package_name = ctx.attr.package_name, + summary = ctx.attr.summary, + group = ctx.attr.group, + description = ctx.attr.description, + post_scriptlet = ctx.attr.post_scriptlet, + architecture = ctx.attr.architecture, + epoch = ctx.attr.epoch, + version = ctx.attr.version, + requires = ctx.attr.requires, + provides = ctx.attr.provides, + conflicts = ctx.attr.conflicts, + obsoletes = ctx.attr.obsoletes, + srcs = ctx.attr.srcs, + ), + DefaultInfo( + files = depset(transitive = mapped_files_depsets), + ), + ] + +pkg_sub_rpm = rule( + doc = """Define a sub RPM to be built as part of a parent RPM + + This rule 
uses the outputs of the rules in `mappings.bzl` to define an sub + RPM that will be built as part of a larger RPM defined by a `pkg_rpm` instance. + + """, + implementation = _pkg_sub_rpm_impl, + # @unsorted-dict-items + attrs = { + "package_name": attr.string(doc = "name of the subrpm"), + "summary": attr.string(doc = "Sub RPM `Summary` tag"), + "group": attr.string( + doc = """Optional; RPM "Group" tag. + + NOTE: some distributions (as of writing, Fedora > 17 and CentOS/RHEL + > 5) have deprecated this tag. Other distributions may require it, + but it is harmless in any case. + + """, + ), + "description": attr.string(doc = "Multi-line description of this subrpm"), + "post_scriptlet": attr.string(doc = "RPM `%post` scriplet for this subrpm"), + "architecture": attr.string(doc = "Sub RPM architecture"), + "epoch": attr.string(doc = "RPM `Epoch` tag for this subrpm"), + "version": attr.string(doc = "RPM `Version` tag for this subrpm"), + "requires": attr.string_list(doc = "List of RPM capability expressions that this package requires"), + "provides": attr.string_list(doc = "List of RPM capability expressions that this package provides"), + "conflicts": attr.string_list(doc = "List of RPM capability expressions that conflict with this package"), + "obsoletes": attr.string_list(doc = "List of RPM capability expressions that this package obsoletes"), + "srcs": attr.label_list( + doc = "Mapping groups to include in this RPM", + mandatory = True, + providers = [ + [PackageSubRPMInfo, DefaultInfo], + [PackageFilegroupInfo, DefaultInfo], + [PackageFilesInfo, DefaultInfo], + [PackageDirsInfo], + [PackageSymlinkInfo], + ], + ), + }, + provides = [PackageSubRPMInfo], +) diff --git a/third_party/rules_pkg-0.9.1/pkg/tar.bzl b/third_party/rules_pkg-1.0.1/pkg/tar.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/tar.bzl rename to third_party/rules_pkg-1.0.1/pkg/tar.bzl diff --git a/third_party/rules_pkg-0.9.1/pkg/verify_archive.bzl 
b/third_party/rules_pkg-1.0.1/pkg/verify_archive.bzl similarity index 81% rename from third_party/rules_pkg-0.9.1/pkg/verify_archive.bzl rename to third_party/rules_pkg-1.0.1/pkg/verify_archive.bzl index e31d539c..d132d497 100644 --- a/third_party/rules_pkg-0.9.1/pkg/verify_archive.bzl +++ b/third_party/rules_pkg-1.0.1/pkg/verify_archive.bzl @@ -23,7 +23,6 @@ The execution time is O(# expected patterns * size of archive). load("@rules_python//python:defs.bzl", "py_test") - def _gen_verify_archive_test_main_impl(ctx): ctx.actions.expand_template( template = ctx.file._template, @@ -38,6 +37,7 @@ def _gen_verify_archive_test_main_impl(ctx): "${MUST_NOT_CONTAIN_REGEX}": str(ctx.attr.must_not_contain_regex), "${MIN_SIZE}": str(ctx.attr.min_size), "${MAX_SIZE}": str(ctx.attr.max_size), + "${VERIFY_LINKS}": str(ctx.attr.verify_links), }, ) return [ @@ -55,7 +55,6 @@ _gen_verify_archive_test_main = rule( allow_single_file = True, mandatory = True, ), - "must_contain": attr.string_list( doc = "List of paths which all must appear in the archive.", ), @@ -69,10 +68,13 @@ _gen_verify_archive_test_main = rule( doc = """List of regexes that must not be in the archive.""", ), "min_size": attr.int( - doc = """Miniumn number of entries in the archive.""" + doc = """Minimum number of entries in the archive.""", ), "max_size": attr.int( - doc = """Miniumn number of entries in the archive.""" + doc = """Maximum number of entries in the archive.""", + ), + "verify_links": attr.string_dict( + doc = """Dict keyed by paths which must appear, and be symlinks to their values.""", ), # Implicit dependencies. 
@@ -83,10 +85,18 @@ _gen_verify_archive_test_main = rule( }, ) -def verify_archive_test(name, target, - must_contain=None, must_contain_regex=None, - must_not_contain=None, must_not_contain_regex=None, - min_size=1, max_size=-1): +# buildifier: disable=function-docstring-args +def verify_archive_test( + name, + target, + must_contain = None, + must_contain_regex = None, + must_not_contain = None, + must_not_contain_regex = None, + min_size = 1, + max_size = -1, + tags = None, + verify_links = None): """Tests that an archive contains specific file patterns. This test is used to verify that an archive contains the expected content. @@ -99,12 +109,14 @@ def verify_archive_test(name, target, must_not_contain_regex: A list of path regexes which must not appear in the archive. min_size: The minimum number of entries which must be in the archive. max_size: The maximum number of entries which must be in the archive. + tags: standard meaning + verify_links: Dict keyed by paths which must appear, and be symlinks to their values. """ - test_src = name + "__internal_main.py" + test_src = name + "__internal_main.py" _gen_verify_archive_test_main( name = name + "_internal_main", target = target, - test_name = name.replace('-', '_') + "Test", + test_name = name.replace("-", "_") + "Test", out = test_src, must_contain = must_contain, must_contain_regex = must_contain_regex, @@ -112,17 +124,13 @@ def verify_archive_test(name, target, must_not_contain_regex = must_not_contain_regex, min_size = min_size, max_size = max_size, + tags = tags, + verify_links = verify_links, ) py_test( name = name, - # Hey reviewer!!! What if we just added the source to the test lib - # here, so we would not have to make the library for that public? 
srcs = [":" + test_src], main = test_src, data = [target], python_version = "PY3", - deps = [ - "//pkg:verify_archive_test_lib", - "@bazel_tools//tools/python/runfiles", - ], ) diff --git a/third_party/rules_pkg-0.9.1/pkg/verify_archive_test_lib.py b/third_party/rules_pkg-1.0.1/pkg/verify_archive_test_main.py.tpl similarity index 69% rename from third_party/rules_pkg-0.9.1/pkg/verify_archive_test_lib.py rename to third_party/rules_pkg-1.0.1/pkg/verify_archive_test_main.py.tpl index 5d669480..f831ecb5 100644 --- a/third_party/rules_pkg-0.9.1/pkg/verify_archive_test_lib.py +++ b/third_party/rules_pkg-1.0.1/pkg/verify_archive_test_main.py.tpl @@ -11,21 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Compare to content manifest files.""" +"""Tests for generated content manifest.""" -import json import re import tarfile import unittest -from bazel_tools.tools.python.runfiles import runfiles - - class VerifyArchiveTest(unittest.TestCase): """Test harness to see if we wrote the content manifest correctly.""" - - #run_files = runfiles.Create() - #target_path = VerifyArchiveTest.run_files.Rlocation('rules_pkg/' + target) + def setUp(self): super(VerifyArchiveTest, self).setUp() @@ -42,17 +36,22 @@ def scan_target(self, target): def load_tar(self, path): self.paths = [] + self.links = {} with tarfile.open(path, 'r:*') as f: i = 0 for info in f: self.paths.append(info.name) + if info.linkname: + self.links[info.name] = info.linkname def assertMinSize(self, min_size): """Check that the archive contains at least min_size entries. Args: - min_size: The minium number of targets we expect. + min_size: The minimum number of targets we expect. 
""" + if min_size <= 0: + return actual_size = len(self.paths) self.assertGreaterEqual( len(self.paths), @@ -66,6 +65,8 @@ def assertMaxSize(self, max_size): Args: max_size: The maximum number of targets we expect. """ + if max_size <= 0: + return actual_size = len(self.paths) self.assertLessEqual( len(self.paths), @@ -79,7 +80,7 @@ def check_must_contain(self, must_contain): if path in plain_patterns: plain_patterns.remove(path) if len(plain_patterns) > 0: - self.fail('These required paths were not found: %s' % ','.join(plain_patterns)) + self.fail('These required paths were not found: %s' % ','.join(plain_patterns) + ' in [%s]' % ','.join(self.paths)) def check_must_not_contain(self, must_not_contain): plain_patterns = set(must_not_contain) @@ -105,3 +106,43 @@ def check_must_not_contain_regex(self, must_not_contain_regex): for path in self.paths: if r_comp.match(path): self.fail('Found disallowed pattern (%s) in the archive' % pattern) + + def verify_links(self, verify_links): + for link, target in verify_links.items(): + if link not in self.paths: + self.fail('Required link (%s) is not in the archive' % link) + if self.links[link] != target: + self.fail('link (%s) points to the wrong place. 
Expected (%s) got (%s)' % + (link, target, self.links[link])) + + +class ${TEST_NAME}(VerifyArchiveTest): + + def setUp(self): + super(${TEST_NAME}, self).setUp() + self.scan_target('${TARGET}') + + def test_min_size(self): + self.assertMinSize(${MIN_SIZE}) + + def test_max_size(self): + self.assertMaxSize(${MAX_SIZE}) + + def test_must_contain(self): + self.check_must_contain(${MUST_CONTAIN}) + + def test_must_not_contain(self): + self.check_must_not_contain(${MUST_NOT_CONTAIN}) + + def test_must_not_contain(self): + self.check_must_contain_regex(${MUST_CONTAIN_REGEX}) + + def test_must_not_contain(self): + self.check_must_not_contain_regex(${MUST_NOT_CONTAIN_REGEX}) + + def test_verify_links(self): + self.verify_links(${VERIFY_LINKS}) + + +if __name__ == '__main__': + unittest.main() diff --git a/third_party/rules_pkg-0.9.1/pkg/zip.bzl b/third_party/rules_pkg-1.0.1/pkg/zip.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/pkg/zip.bzl rename to third_party/rules_pkg-1.0.1/pkg/zip.bzl diff --git a/third_party/rules_pkg-0.9.1/toolchains/git/BUILD b/third_party/rules_pkg-1.0.1/toolchains/git/BUILD similarity index 99% rename from third_party/rules_pkg-0.9.1/toolchains/git/BUILD rename to third_party/rules_pkg-1.0.1/toolchains/git/BUILD index bf1fa759..7d1596e9 100644 --- a/third_party/rules_pkg-0.9.1/toolchains/git/BUILD +++ b/third_party/rules_pkg-1.0.1/toolchains/git/BUILD @@ -16,7 +16,7 @@ Type: @rules_pkg//toolchains/git:git_toolchain_type Toolchains: -- git_missing_toolchain: provides a fallback toolchain for exec plaforms +- git_missing_toolchain: provides a fallback toolchain for exec platforms where git might not be available. - git_auto_toolchain: a toolchain that uses the installed git. 
See diff --git a/third_party/rules_pkg-0.9.1/toolchains/git/BUILD.tpl b/third_party/rules_pkg-1.0.1/toolchains/git/BUILD.tpl similarity index 100% rename from third_party/rules_pkg-0.9.1/toolchains/git/BUILD.tpl rename to third_party/rules_pkg-1.0.1/toolchains/git/BUILD.tpl diff --git a/third_party/rules_pkg-0.9.1/toolchains/git/git.bzl b/third_party/rules_pkg-1.0.1/toolchains/git/git.bzl similarity index 100% rename from third_party/rules_pkg-0.9.1/toolchains/git/git.bzl rename to third_party/rules_pkg-1.0.1/toolchains/git/git.bzl diff --git a/third_party/rules_pkg-0.9.1/toolchains/git/git_configure.bzl b/third_party/rules_pkg-1.0.1/toolchains/git/git_configure.bzl similarity index 78% rename from third_party/rules_pkg-0.9.1/toolchains/git/git_configure.bzl rename to third_party/rules_pkg-1.0.1/toolchains/git/git_configure.bzl index 4ad26d7f..f537c1a1 100644 --- a/third_party/rules_pkg-0.9.1/toolchains/git/git_configure.bzl +++ b/third_party/rules_pkg-1.0.1/toolchains/git/git_configure.bzl @@ -48,7 +48,7 @@ _find_system_git = repository_rule( local = True, attrs = { "workspace_file": attr.label( - doc = "Referece to calling repository WORKSPACE file.", + doc = "Reference to calling repository WORKSPACE file.", allow_single_file = True, mandatory = True, ), @@ -58,6 +58,7 @@ _find_system_git = repository_rule( }, ) +# buildifier: disable=function-docstring-args def experimental_find_system_git(name, workspace_file = None, verbose = False): """Create a toolchain that lets you run git. @@ -76,3 +77,19 @@ def experimental_find_system_git(name, workspace_file = None, verbose = False): "@%s//:git_auto_toolchain" % name, "@rules_pkg//toolchains/git:git_missing_toolchain", ) + +# buildifier: disable=function-docstring-args +def experimental_find_system_git_bzlmod(name, workspace_file = None, verbose = False): + """Create a toolchain that lets you run git. + + WARNING: This is experimental. The API and behavior are subject to change + at any time. 
+ + This presumes that your Bazel WORKSPACE file is located under your git + client. That is often true, but might not be in a multi-repo where you + might weave together a Bazel workspace from several git repos that are + all rooted under the WORKSPACE file. + """ + if not workspace_file: + workspace_file = Label("//:MODULE.bazel") + _find_system_git(name = name, workspace_file = workspace_file, verbose = verbose) diff --git a/third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD b/third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD similarity index 99% rename from third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD rename to third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD index 10a9bdc3..58ec5dbb 100644 --- a/third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD +++ b/third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD @@ -17,7 +17,7 @@ Type: @rules_pkg//toolchains/rpm:rpmbuild_toolchain_type Toolchains: -- rpmbuild_missing_toolchain: provides a fallback toolchain for exec plaforms +- rpmbuild_missing_toolchain: provides a fallback toolchain for exec platforms where rpmbuild might not be available. - rpmbuild_auto_toolchain: a toolchain that uses the installed rpmbuild. 
See diff --git a/third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD.tpl b/third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD.tpl similarity index 58% rename from third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD.tpl rename to third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD.tpl index ca4b2187..5045bffa 100644 --- a/third_party/rules_pkg-0.9.1/toolchains/rpm/BUILD.tpl +++ b/third_party/rules_pkg-1.0.1/toolchains/rpm/BUILD.tpl @@ -5,6 +5,7 @@ rpmbuild_toolchain( name = "rpmbuild_auto", path = "{RPMBUILD_PATH}", version = "{RPMBUILD_VERSION}", + debuginfo_type = "{RPMBUILD_DEBUGINFO_TYPE}", ) toolchain( @@ -12,3 +13,9 @@ toolchain( toolchain = ":rpmbuild_auto", toolchain_type = "@rules_pkg//toolchains/rpm:rpmbuild_toolchain_type", ) + +toolchain( + name = "zzz_rpmbuild_missing_toolchain", # keep name lexigraphically last + toolchain = "@rules_pkg//toolchains/rpm:no_rpmbuild", + toolchain_type = "@rules_pkg//toolchains/rpm:rpmbuild_toolchain_type", +) diff --git a/third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild.bzl b/third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild.bzl similarity index 86% rename from third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild.bzl rename to third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild.bzl index 3aa0ea04..7cb4459a 100644 --- a/third_party/rules_pkg-0.9.1/toolchains/rpm/rpmbuild.bzl +++ b/third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild.bzl @@ -21,6 +21,7 @@ RpmbuildInfo = provider( "label": "The path to a target I will build", "path": "The path to a pre-built rpmbuild", "version": "The version string of rpmbuild", + "debuginfo_type": "The variant of the underlying debuginfo config", }, ) @@ -35,6 +36,7 @@ def _rpmbuild_toolchain_impl(ctx): label = ctx.attr.label, path = ctx.attr.path, version = ctx.attr.version, + debuginfo_type = ctx.attr.debuginfo_type, ), ) return [toolchain_info] @@ -54,6 +56,14 @@ rpmbuild_toolchain = rule( "version": attr.string( doc = "The version string of the rpmbuild executable. 
This should be manually set.", ), + "debuginfo_type": attr.string( + doc = """ + The underlying debuginfo configuration for the system rpmbuild. + + One of centos7, fedora40, or none + """, + default = "none", + ), }, ) @@ -70,5 +80,6 @@ is_rpmbuild_available = rule( toolchains = ["@rules_pkg//toolchains/rpm:rpmbuild_toolchain_type"], ) +# buildifier: disable=unnamed-macro def rpmbuild_register_toolchains(): native.register_toolchains("@rules_pkg//toolchains/rpm:rpmbuild_missing_toolchain") diff --git a/third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild_configure.bzl b/third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild_configure.bzl new file mode 100644 index 00000000..067333ce --- /dev/null +++ b/third_party/rules_pkg-1.0.1/toolchains/rpm/rpmbuild_configure.bzl @@ -0,0 +1,128 @@ +# Copyright 2020 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Repository rule to autoconfigure a toolchain using the system rpmbuild.""" + +# NOTE: this must match the name used by register_toolchains in consuming +# MODULE.bazel files. It seems like we should have a better interface that +# allows for this module name to be specified from a single point. 
+NAME = "rules_pkg_rpmbuild" +RELEASE_PATH = "/etc/os-release" + +def _write_build(rctx, path, version, debuginfo_type): + if not path: + path = "" + rctx.template( + "BUILD", + Label("//toolchains/rpm:BUILD.tpl"), + substitutions = { + "{GENERATOR}": "@rules_pkg//toolchains/rpm/rpmbuild_configure.bzl%find_system_rpmbuild", + "{RPMBUILD_PATH}": str(path), + "{RPMBUILD_VERSION}": version, + "{RPMBUILD_DEBUGINFO_TYPE}": debuginfo_type, + }, + executable = False, + ) + +def _strip_quote(s): + if s.startswith("\"") and s.endswith("\"") and len(s) > 1: + return s[1:-1] + + return s + +def _parse_release_info(release_info): + os_name = "unknown" + os_version = "unknown" + + for line in release_info.splitlines(): + if "=" not in line: + continue + + key, value = line.split("=") + if key == "ID": + os_name = _strip_quote(value) + + if key == "VERSION_ID": + os_version = _strip_quote(value) + + return os_name, os_version + +KNOWN_DEBUGINFO_VERSIONS = { + "almalinux": ["9.3"], + "centos": ["7", "9"], + "fedora": ["40"], +} + +def _build_repo_for_rpmbuild_toolchain_impl(rctx): + debuginfo_type = "none" + if rctx.path(RELEASE_PATH).exists: + os_name, os_version = _parse_release_info(rctx.read(RELEASE_PATH)) + if (os_name in KNOWN_DEBUGINFO_VERSIONS and + os_version in KNOWN_DEBUGINFO_VERSIONS[os_name]): + debuginfo_type = os_name + os_version + + rpmbuild_path = rctx.which("rpmbuild") + if rctx.attr.verbose: + if rpmbuild_path: + print("Found rpmbuild at '%s'" % rpmbuild_path) # buildifier: disable=print + else: + print("No system rpmbuild found.") # buildifier: disable=print + + if rctx.attr.debuginfo_type not in ["centos7", "fedora40", "none"]: + fail("debuginfo_type must be one of centos7, fedora40, or none") + + version = "unknown" + if rpmbuild_path: + res = rctx.execute([rpmbuild_path, "--version"]) + if res.return_code == 0: + # expect stdout like: RPM version 4.16.1.2 + parts = res.stdout.strip().split(" ") + if parts[0] == "RPM" and parts[1] == "version": + version = 
parts[2] + + _write_build( + rctx = rctx, + path = rpmbuild_path, + version = version, + debuginfo_type = debuginfo_type, + ) + +build_repo_for_rpmbuild_toolchain = repository_rule( + implementation = _build_repo_for_rpmbuild_toolchain_impl, + doc = """Create a repository that defines an rpmbuild toolchain based on the system rpmbuild.""", + local = True, + environ = ["PATH"], + attrs = { + "verbose": attr.bool( + doc = "If true, print status messages.", + ), + "debuginfo_type": attr.string( + doc = """ + The underlying debuginfo configuration for the system rpmbuild. + + One of centos7, fedora40, or none + """, + default = "none", + ), + }, +) + +# For use from WORKSPACE +def find_system_rpmbuild(name, verbose = False): + build_repo_for_rpmbuild_toolchain(name = name, verbose = verbose) + native.register_toolchains("@%s//:all" % name) + +# For use from MODULE.bzl +find_system_rpmbuild_bzlmod = module_extension( + implementation = lambda ctx: build_repo_for_rpmbuild_toolchain(name = NAME), +) diff --git a/third_party/rules_pkg-0.9.1/version.bzl b/third_party/rules_pkg-1.0.1/version.bzl similarity index 97% rename from third_party/rules_pkg-0.9.1/version.bzl rename to third_party/rules_pkg-1.0.1/version.bzl index 5b589f3d..62b7ee10 100644 --- a/third_party/rules_pkg-0.9.1/version.bzl +++ b/third_party/rules_pkg-1.0.1/version.bzl @@ -13,4 +13,4 @@ # limitations under the License. 
"""The version of rules_pkg.""" -version = "0.9.1" +version = "1.0.1" diff --git a/third_party/software-bill-of-materials.md b/third_party/software-bill-of-materials.md index 128c147d..8ae8f1e9 100644 --- a/third_party/software-bill-of-materials.md +++ b/third_party/software-bill-of-materials.md @@ -8,21 +8,21 @@ This file documents the commit hash of each library if it is not given in the fi # Flatland dependencies ```text -abseil-cpp-eb852207758a773965301d0ae717e4235fc5301a -bazel-skylib-fa66e6b15b06070c0c6467983b4892bc33dc9145 +abseil-cpp-ac267be5cf9b722a89c0293ccaa7df1ecdb3a446 +bazel-skylib-5c071b5006bb9799981d04d74a28bdee2f000d4a bazel-toolchain-795d76fd03e0b17c0961f0981a8512a00cba4fa2 -boringssl-6144d655aff9a29beaebc29e6258a05629b52ae6 -Catch2-4e8d92bf02f7d1c8006a0e7a5ecabd8e62d98502 -fmt-8757f1f8d6283f43c25e2b62bd1c2a5b0dd439f9 +boringssl-e056e3e52e1dd0e5909fe43d4a684a0c9e96f1f9 +Catch2-8898cc61601af6cdd5b4548dd91ccf70ed67f3c2 +fmt-993f56cff6f0c3f03953cae44c44693ca782725a gflags-03a4842c9c6aaef438d7bf0c84e8a62c8064992b glog-570c7e4e1dd197e9ae2777152b87a5ea9e06bcac -googletest-9ff2450a56aed4f7f124f5104d9e3088bf791ee9 +googletest-ff233bdd4cac0a0bf6e5cd45bda3406814cb2796 hypothesis-3acfea6c14078203802b417b61ad161111106fe4 -openexr-8169016e5cf9a6fe84ef55009f38589b3e410f15 +openexr-de812345642eea6ec0d215da678327f54c01e626 pcg-cpp-428802d1a5634f96bcd0705fab379ff0113bcf13 pugixml-30cc354fe37114ec7a0a4ed2192951690357c2ed xtensor-8c0a484f04eccd0dbc0e25eb58a97de000fb048b xtl-d11fb6b5f4c417025124ed2c62175284846a1914 -rules_cc-0d1b084cfa75dc2f41e4b638ebd544fc93b82edf +rules_cc-84fceed8876865dc3419e9646a03091e44e90699 ``` diff --git a/third_party/yaml-cpp/CMakeLists.txt b/third_party/yaml-cpp/CMakeLists.txt index 72fa5427..7e8a528d 100644 --- a/third_party/yaml-cpp/CMakeLists.txt +++ b/third_party/yaml-cpp/CMakeLists.txt @@ -197,6 +197,7 @@ if (YAML_CPP_FORMAT_SOURCE AND YAML_CPP_CLANG_FORMAT_EXE) COMMAND clang-format --style=file -i $ COMMAND_EXPAND_LISTS COMMENT 
"Running clang-format" + WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" VERBATIM) endif() diff --git a/third_party/yaml-cpp/docs/How-To-Emit-YAML.md b/third_party/yaml-cpp/docs/How-To-Emit-YAML.md index f28fcbe0..6e6f9f58 100644 --- a/third_party/yaml-cpp/docs/How-To-Emit-YAML.md +++ b/third_party/yaml-cpp/docs/How-To-Emit-YAML.md @@ -154,6 +154,7 @@ produces # STL Containers, and Other Overloads # We overload `operator <<` for `std::vector`, `std::list`, and `std::map`, so you can write stuff like: +{% raw %} ```cpp std::vector squares = {1, 4, 9, 16}; @@ -165,6 +166,7 @@ out << YAML::Flow << squares; out << ages; out << YAML::EndSeq; ``` +{% endraw %} produces diff --git a/third_party/yaml-cpp/include/yaml-cpp/emitter.h b/third_party/yaml-cpp/include/yaml-cpp/emitter.h index 210b1ec9..2897fc0a 100644 --- a/third_party/yaml-cpp/include/yaml-cpp/emitter.h +++ b/third_party/yaml-cpp/include/yaml-cpp/emitter.h @@ -141,6 +141,7 @@ inline Emitter& Emitter::WriteIntegralType(T value) { PrepareNode(EmitterNodeType::Scalar); std::stringstream stream; + stream.imbue(std::locale("C")); PrepareIntegralStream(stream); stream << value; m_stream << stream.str(); @@ -158,6 +159,7 @@ inline Emitter& Emitter::WriteStreamable(T value) { PrepareNode(EmitterNodeType::Scalar); std::stringstream stream; + stream.imbue(std::locale("C")); SetStreamablePrecision(stream); bool special = false; diff --git a/third_party/yaml-cpp/include/yaml-cpp/emitterstyle.h b/third_party/yaml-cpp/include/yaml-cpp/emitterstyle.h index 67bb3981..5a6355fa 100644 --- a/third_party/yaml-cpp/include/yaml-cpp/emitterstyle.h +++ b/third_party/yaml-cpp/include/yaml-cpp/emitterstyle.h @@ -8,9 +8,10 @@ #endif namespace YAML { -struct EmitterStyle { - enum value { Default, Block, Flow }; -}; +namespace EmitterStyle { +enum value { Default, Block, Flow }; +} + } #endif // EMITTERSTYLE_H_62B23520_7C8E_11DE_8A39_0800200C9A66 diff --git a/third_party/yaml-cpp/include/yaml-cpp/node/convert.h 
b/third_party/yaml-cpp/include/yaml-cpp/node/convert.h index d49702f8..c8b3c336 100644 --- a/third_party/yaml-cpp/include/yaml-cpp/node/convert.h +++ b/third_party/yaml-cpp/include/yaml-cpp/node/convert.h @@ -171,6 +171,7 @@ ConvertStreamTo(std::stringstream& stream, T& rhs) { \ static Node encode(const type& rhs) { \ std::stringstream stream; \ + stream.imbue(std::locale("C")); \ stream.precision(std::numeric_limits::max_digits10); \ conversion::inner_encode(rhs, stream); \ return Node(stream.str()); \ @@ -182,6 +183,7 @@ ConvertStreamTo(std::stringstream& stream, T& rhs) { } \ const std::string& input = node.Scalar(); \ std::stringstream stream(input); \ + stream.imbue(std::locale("C")); \ stream.unsetf(std::ios::dec); \ if ((stream.peek() == '-') && std::is_unsigned::value) { \ return false; \ diff --git a/third_party/yaml-cpp/include/yaml-cpp/node/type.h b/third_party/yaml-cpp/include/yaml-cpp/node/type.h index 9d55ca96..b1237670 100644 --- a/third_party/yaml-cpp/include/yaml-cpp/node/type.h +++ b/third_party/yaml-cpp/include/yaml-cpp/node/type.h @@ -8,9 +8,10 @@ #endif namespace YAML { -struct NodeType { - enum value { Undefined, Null, Scalar, Sequence, Map }; -}; +namespace NodeType { +enum value { Undefined, Null, Scalar, Sequence, Map }; +} + } #endif // VALUE_TYPE_H_62B23520_7C8E_11DE_8A39_0800200C9A66 diff --git a/third_party/yaml-cpp/include/yaml-cpp/traits.h b/third_party/yaml-cpp/include/yaml-cpp/traits.h index ffe9999f..7c4cdd90 100644 --- a/third_party/yaml-cpp/include/yaml-cpp/traits.h +++ b/third_party/yaml-cpp/include/yaml-cpp/traits.h @@ -121,6 +121,7 @@ template struct streamable_to_string { static std::string impl(const Key& key) { std::stringstream ss; + ss.imbue(std::locale("C")); ss << key; return ss.str(); } diff --git a/third_party/yaml-cpp/src/node_data.cpp b/third_party/yaml-cpp/src/node_data.cpp index 8f5422ae..3321263f 100644 --- a/third_party/yaml-cpp/src/node_data.cpp +++ b/third_party/yaml-cpp/src/node_data.cpp @@ -310,6 +310,7 @@ 
void node_data::convert_sequence_to_map(const shared_memory_holder& pMemory) { reset_map(); for (std::size_t i = 0; i < m_sequence.size(); i++) { std::stringstream stream; + stream.imbue(std::locale("C")); stream << i; node& key = pMemory->create_node(); diff --git a/third_party/yaml-cpp/src/parser.cpp b/third_party/yaml-cpp/src/parser.cpp index b8b78eba..5feda358 100644 --- a/third_party/yaml-cpp/src/parser.cpp +++ b/third_party/yaml-cpp/src/parser.cpp @@ -77,6 +77,7 @@ void Parser::HandleYamlDirective(const Token& token) { } std::stringstream str(token.params[0]); + str.imbue(std::locale("C")); str >> m_pDirectives->version.major; str.get(); str >> m_pDirectives->version.minor; diff --git a/third_party/yaml-cpp/src/singledocparser.cpp b/third_party/yaml-cpp/src/singledocparser.cpp index 22913d19..a8e949c2 100644 --- a/third_party/yaml-cpp/src/singledocparser.cpp +++ b/third_party/yaml-cpp/src/singledocparser.cpp @@ -1,4 +1,3 @@ -#include #include #include @@ -93,8 +92,8 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) { // add non-specific tags if (tag.empty()) tag = (token.type == Token::NON_PLAIN_SCALAR ? "!" 
: "?"); - - if (token.type == Token::PLAIN_SCALAR + + if (token.type == Token::PLAIN_SCALAR && tag.compare("?") == 0 && IsNullString(token.value)) { eventHandler.OnNull(mark, anchor); m_scanner.pop(); diff --git a/third_party/yaml-cpp/src/token.h b/third_party/yaml-cpp/src/token.h index 9c9a5b77..1134af02 100644 --- a/third_party/yaml-cpp/src/token.h +++ b/third_party/yaml-cpp/src/token.h @@ -13,7 +13,7 @@ #include namespace YAML { -const std::string TokenNames[] = { +constexpr const char* TokenNames[] = { "DIRECTIVE", "DOC_START", "DOC_END", "BLOCK_SEQ_START", "BLOCK_MAP_START", "BLOCK_SEQ_END", "BLOCK_MAP_END", "BLOCK_ENTRY", "FLOW_SEQ_START", "FLOW_MAP_START", "FLOW_SEQ_END", "FLOW_MAP_END", diff --git a/third_party/yaml-cpp/test/node/node_test.cpp b/third_party/yaml-cpp/test/node/node_test.cpp index 5f41ef25..b4444554 100644 --- a/third_party/yaml-cpp/test/node/node_test.cpp +++ b/third_party/yaml-cpp/test/node/node_test.cpp @@ -849,5 +849,17 @@ TEST_F(NodeEmitterTest, NestFlowMapListNode) { ExpectOutput("{position: [1.5, 2.25, 3.125]}", mapNode); } + +TEST_F(NodeEmitterTest, RobustAgainstLocale) { + std::locale::global(std::locale("")); + Node node; + node.push_back(1.5); + node.push_back(2.25); + node.push_back(3.125); + node.push_back(123456789); + + ExpectOutput("- 1.5\n- 2.25\n- 3.125\n- 123456789", node); +} + } }